diff --git a/network_wrangler/logger.py b/network_wrangler/logger.py index ac74fb87..1474abb5 100644 --- a/network_wrangler/logger.py +++ b/network_wrangler/logger.py @@ -21,7 +21,7 @@ def setupLogging( level: the level of logging that will be recorded log_filename: the location of the log file that will get created to add the DEBUG log log_to_console: if True, logging will go to the console at INFO level - """ + """ if level is None: WranglerLogger.setLevel(logging.DEBUG) diff --git a/network_wrangler/projectcard.py b/network_wrangler/projectcard.py index b1687bf0..3a17b506 100644 --- a/network_wrangler/projectcard.py +++ b/network_wrangler/projectcard.py @@ -39,10 +39,10 @@ class ProjectCard(object): def __init__(self, attribute_dictonary: dict): """ - Constructor for Project Card object. + Constructor for Project Card object. args: - attribute_dictonary: a nested dictionary of project card attributes. + attribute_dictonary: a nested dictionary of project card attributes. """ # add these first so they are first on write out self.project = None @@ -64,8 +64,8 @@ def read(card_filename: str, validate: bool = True): Reads and validates a Project card args: - card_filename: The path to the project card file. - validate: Boolean indicating if the project card should be validated. Defaults to True. + card_filename: The path to the project card file. + validate: Boolean indicating if the project card should be validated. Defaults to True. Returns a Project Card object """ @@ -124,7 +124,7 @@ def read_yml(card_filename: str) -> dict: Reads "normal" wrangler project cards defined in YAML. Args: - card_filename: file location where the project card is. + card_filename: file location where the project card is. Returns: Attribute Dictionary for Project Card """ @@ -141,8 +141,8 @@ def write(self, out_filename: str = None): Writes project card dictionary to YAML file. args: - out_filename: file location to write the project card object as yml. - If not provided, will write to current directory using the project name as the filename. + out_filename: file location to write the project card object as yml. + If not provided, will write to current directory using the project name as the filename. """ if not out_filename: from network_wrangler.utils import make_slug @@ -164,12 +164,11 @@ def write(self, out_filename: str = None): @staticmethod def validate_project_card_schema( - card_filename: str, - card_schema_filename: str = "project_card.json" + card_filename: str, card_schema_filename: str = "project_card.json" ) -> bool: """ Tests project card schema validity by evaluating if it conforms to the schemas - + args: card_filename: location of project card .yml file card_schema_filename: location of project card schema to validate against. Defaults to project_card.json. 
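For reference, a minimal usage sketch of the ProjectCard API whose docstrings are cleaned up above — a hedged example, not part of the patch; the card path is hypothetical:

    from network_wrangler import ProjectCard

    # Read a project card and validate it against the bundled schema
    # (validate defaults to True).
    card = ProjectCard.read("example_project_card.yml", validate=True)

    # Schema validation can also be run standalone; returns a bool.
    ok = ProjectCard.validate_project_card_schema("example_project_card.yml")
    print(card.project, ok)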
@@ -195,12 +194,16 @@ def validate_project_card_schema(
         except ValidationError as exc:
             WranglerLogger.error("Failed Project Card validation: Validation Error")
             WranglerLogger.error("Project Card File Loc:{}".format(card_filename))
-            WranglerLogger.error("Project Card Schema Loc:{}".format(card_schema_filename))
+            WranglerLogger.error(
+                "Project Card Schema Loc:{}".format(card_schema_filename)
+            )
             WranglerLogger.error(exc.message)
 
         except SchemaError as exc:
             WranglerLogger.error("Failed Project Card schema validation: Schema Error")
-            WranglerLogger.error("Project Card Schema Loc:{}".format(card_schema_filename))
+            WranglerLogger.error(
+                "Project Card Schema Loc:{}".format(card_schema_filename)
+            )
             WranglerLogger.error(exc.message)
 
         except yaml.YAMLError as exc:
@@ -233,7 +236,7 @@ def build_link_selection_query(
 
         selection_keys = [k for l in selection["link"] for k, v in l.items()]
 
-        unique_link_ids_sel = list( set(unique_link_ids) & set(selection_keys) )
+        unique_link_ids_sel = list(set(unique_link_ids) & set(selection_keys))
 
         for l in selection["link"]:
             for key, value in l.items():
@@ -241,10 +244,7 @@ def build_link_selection_query(
                 if key in ignore:
                     continue
 
-                if (
-                    unique_link_ids_sel
-                    and key not in unique_link_ids
-                ):
+                if unique_link_ids_sel and key not in unique_link_ids:
                     continue
 
                 count = count + 1
@@ -269,10 +269,7 @@ def build_link_selection_query(
                 ):
                     sel_query = sel_query + " and "
 
-                if (
-                    unique_link_ids_sel
-                    and count != len(unique_link_ids_sel)
-                ):
+                if unique_link_ids_sel and count != len(unique_link_ids_sel):
                     sel_query = sel_query + " and "
 
         if not unique_link_ids_sel:
diff --git a/network_wrangler/roadwaynetwork.py b/network_wrangler/roadwaynetwork.py
index 89e0abae..dccd146b 100644
--- a/network_wrangler/roadwaynetwork.py
+++ b/network_wrangler/roadwaynetwork.py
@@ -8,7 +8,7 @@
 import copy
 import numbers
 from random import randint
-from typing import Union
+from typing import Union, Mapping, Collection, List, Optional, Any, Tuple
 import folium
 import pandas as pd
@@ -168,11 +168,13 @@ class RoadwayNetwork(object):
         shape_foreign_key (str): variable linking the links table and shape table
         unique_link_ids (list): list of variables unique to each link
         unique_node_ids (list): list of variables unique to each node
-        modes_to_network_link_variables (dict): Mapping of modes to link variables in the network
-        modes_to_network_nodes_variables (dict): Mapping of modes to node variables in the network
-        managed_lanes_node_id_scalar (int): Scalar values added to primary keys for nodes for
+        unique_link_key (str): variable used for linking link tables to each other
+        unique_node_key (str): variable used for linking node tables to each other
+        modes_to_network_link_variables (dict): Mapping of modes to link variables in the network
+        modes_to_network_nodes_variables (dict): Mapping of modes to node variables in the network
+        managed_lanes_node_id_scalar (int): Scalar value added to primary keys for nodes for
             corresponding managed lanes.
-        managed_lanes_link_id_scalar (int): Scalar values added to primary keys for links for
+        managed_lanes_link_id_scalar (int): Scalar value added to primary keys for links for
             corresponding managed lanes.
         managed_lanes_required_attributes (list): attributes that must be specified in managed lane projects.
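Context for the constructor rework in the next hunk — after this change every RoadwayNetwork setting has a keyword default, so callers can rely on the module constants or override selectively. A sketch (not part of the patch; file paths are placeholders), mirroring how the updated tests call it:

    from network_wrangler import RoadwayNetwork

    net = RoadwayNetwork.read(
        link_filename="link.json",
        node_filename="node.geojson",
        shape_filename="shape.geojson",
        fast=True,                     # skip schema validation for speed
        shape_foreign_key="shape_id",  # forwarded through **kwargs to __init__
    )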
@@ -184,47 +186,66 @@ class RoadwayNetwork(object):
 
     def __init__(
         self,
-        nodes: GeoDataFrame,
-        links: GeoDataFrame,
-        shapes: GeoDataFrame = None,
-        node_foreign_key: str = None,
-        link_foreign_key: str = None,
-        shape_foreign_key: str = None,
-        unique_link_ids: list = None,
-        unique_node_ids: list = None,
-        crs: int = None,
+        nodes_df: GeoDataFrame,
+        links_df: GeoDataFrame,
+        shapes_df: GeoDataFrame = None,
+        node_foreign_key: str = NODE_FOREIGN_KEY,
+        link_foreign_key: str = LINK_FOREIGN_KEY,
+        shape_foreign_key: str = SHAPE_FOREIGN_KEY,
+        unique_link_key: str = UNIQUE_LINK_KEY,
+        unique_node_key: str = UNIQUE_NODE_KEY,
+        unique_link_ids: list = UNIQUE_LINK_IDS,
+        unique_node_ids: list = UNIQUE_NODE_IDS,
+        crs: int = CRS,
+        modes_to_network_link_variables: Mapping[str, Collection] = MODES_TO_NETWORK_LINK_VARIABLES,
+        modes_to_network_nodes_variables: Mapping[str, Collection] = MODES_TO_NETWORK_NODES_VARIABLES,
+        managed_lanes_link_id_scalar: int = MANAGED_LANES_LINK_ID_SCALAR,
+        managed_lanes_node_id_scalar: int = MANAGED_LANES_NODE_ID_SCALAR,
+        managed_lanes_required_attributes: list = MANAGED_LANES_REQUIRED_ATTRIBUTES,
+        keep_same_attributes_ml_and_gp: list = KEEP_SAME_ATTRIBUTES_ML_AND_GP,
         **kwargs,
     ):
         """
         Constructor
         """
 
-        inputs_valid = [isinstance(x,GeoDataFrame) for x in (nodes, links, shapes)]
-        if False in inputs_valid:
-            raise(TypeError("Input nodes ({}), links ({})or shapes ({}) not of required type GeoDataFrame".format(inputs_valid)))
-
-        self.nodes_df = nodes
-        self.links_df = links
-        self.shapes_df = shapes
-
-        self.node_foreign_key = NODE_FOREIGN_KEY if node_foreign_key is None else node_foreign_key
-        self.link_foreign_key = LINK_FOREIGN_KEY if link_foreign_key is None else link_foreign_key
-        self.shape_foreign_key = SHAPE_FOREIGN_KEY if shape_foreign_key is None else shape_foreign_key
+        inputs_valid_types = [isinstance(x, GeoDataFrame) for x in (nodes_df, links_df, shapes_df)]
+        if False in inputs_valid_types:
+            raise (
+                TypeError(
+                    "Input nodes ({}), links ({}), or shapes ({}) not of required type GeoDataFrame".format(
+                        *[type(x) for x in (nodes_df, links_df, shapes_df)]
+                    )
+                )
+            )
 
-        self.unique_link_ids = UNIQUE_LINK_IDS if unique_link_ids is None else unique_link_ids
-        self.unique_node_ids = UNIQUE_NODE_IDS if unique_node_ids is None else unique_node_ids
+        self.nodes_df = nodes_df
+        self.links_df = links_df
+        self.shapes_df = shapes_df
+
+        self.node_foreign_key = node_foreign_key
+        self.link_foreign_key = link_foreign_key
+        self.shape_foreign_key = shape_foreign_key
+        self.unique_link_key = unique_link_key
+        self.unique_node_key = unique_node_key
+        self.unique_link_ids = unique_link_ids
+        self.unique_node_ids = unique_node_ids
+        self.crs = crs
+        self.modes_to_network_link_variables = modes_to_network_link_variables
+        self.modes_to_network_nodes_variables = modes_to_network_nodes_variables
+        self.managed_lanes_link_id_scalar = managed_lanes_link_id_scalar
+        self.managed_lanes_node_id_scalar = managed_lanes_node_id_scalar
+        self.managed_lanes_required_attributes = managed_lanes_required_attributes
+        self.keep_same_attributes_ml_and_gp = keep_same_attributes_ml_and_gp
 
-        self.crs = CRS if crs is None else crs
+        self.selections = {}
 
         self.__dict__.update(kwargs)
 
-        # Add non-required fields if they aren't there.
-        # for field, default_value in RoadwayNetwork.OPTIONAL_FIELDS:
-        #    if field not in self.links_df.columns:
-        #        self.links_df[field] = default_value
+        # WranglerLogger.debug("SELF.__DICT__: {}".format("\n-".join(self.__dict__)))
+
         if not self.validate_uniqueness():
             raise ValueError("IDs in network not unique")
 
-        self.selections = {}
-
+
     @staticmethod
     def read(
         link_filename: str,
@@ -232,19 +253,7 @@ def read(
         shape_filename: str,
         fast: bool = True,
         crs: int = CRS,
-        node_foreign_key: str = NODE_FOREIGN_KEY,
-        link_foreign_key: list = LINK_FOREIGN_KEY,
-        shape_foreign_key: str = SHAPE_FOREIGN_KEY,
-        unique_link_key: str = UNIQUE_LINK_KEY,
-        unique_node_key: str = UNIQUE_NODE_KEY,
-        unique_link_ids: list = UNIQUE_LINK_IDS,
-        unique_node_ids: list = UNIQUE_NODE_IDS,
-        modes_to_network_link_variables: dict = MODES_TO_NETWORK_LINK_VARIABLES,
-        modes_to_network_nodes_variables: dict = MODES_TO_NETWORK_NODES_VARIABLES,
-        managed_lanes_link_id_scalar: int = MANAGED_LANES_LINK_ID_SCALAR,
-        managed_lanes_node_id_scalar: int = MANAGED_LANES_NODE_ID_SCALAR,
-        managed_lanes_required_attributes: list = MANAGED_LANES_REQUIRED_ATTRIBUTES,
-        keep_same_attributes_ml_and_gp: list = KEEP_SAME_ATTRIBUTES_ML_AND_GP,
+        **kwargs,
     ) -> RoadwayNetwork:
         """
         Reads a network from the roadway network standard
@@ -256,60 +265,39 @@ def read(
             shape_filename: full path to the shape file
             fast: boolean that will skip validation to speed up read time
             crs: coordinate reference system, EPSG number
-            node_foreign_key: variable linking the node table to the link table.
-            link_foreign_key:
-            shape_foreign_key:
-            unique_link_ids:
-            unique_node_ids:
-            modes_to_network_link_variables:
-            modes_to_network_nodes_variables:
-            managed_lanes_node_id_scalar:
-            managed_lanes_link_id_scalar:
-            managed_lanes_required_attributes:
-            keep_same_attributes_ml_and_gp:
-
-        Returns: a RoadwayNetwork instance
+
+        Returns: RoadwayNetwork
 
         .. todo:: Turn off fast=True as default
         """
 
         WranglerLogger.info("Reading RoadwayNetwork")
 
-        nodes_df,links_df,shapes_df = RoadwayNetwork.load_transform_network(
+        nodes_df, links_df, shapes_df = RoadwayNetwork.load_transform_network(
             node_filename,
             link_filename,
             shape_filename,
-            crs = crs,
-            node_foreign_key = node_foreign_key,
-            validate_schema = not fast,
+            validate_schema=not fast,
+            **kwargs,
         )
 
         roadway_network = RoadwayNetwork(
-            nodes=nodes_df,
-            links=links_df,
-            shapes=shapes_df,
-            crs=crs,
-            node_foreign_key=node_foreign_key,
-            link_foreign_key=link_foreign_key,
-            shape_foreign_key=shape_foreign_key,
-            unique_link_ids=unique_link_ids,
-            unique_node_ids=unique_node_ids,
-            unique_link_key=unique_link_key,
-            unique_node_key=unique_node_key,
-            modes_to_network_link_variables=modes_to_network_link_variables,
-            modes_to_network_nodes_variables=modes_to_network_nodes_variables,
-            link_filename = link_filename,
-            node_filename = node_filename,
-            shape_filename = shape_filename,
+            nodes_df=nodes_df,
+            links_df=links_df,
+            shapes_df=shapes_df,
+            link_filename=link_filename,
+            node_filename=node_filename,
+            shape_filename=shape_filename,
+            **kwargs,
         )
 
         return roadway_network
 
     @staticmethod
     def load_transform_network(
-        node_filename: str, 
-        link_filename: str, 
-        shape_filename: str, 
+        node_filename: str,
+        link_filename: str,
+        shape_filename: str,
         crs: int = CRS,
         node_foreign_key: str = NODE_FOREIGN_KEY,
         validate_schema: bool = True,
@@ -318,14 +306,14 @@ def load_transform_network(
         """
         Reads roadway network files from disk and transforms them into GeoDataFrames.
 
-        args: 
+        args:
             node_filename: file name for nodes.
             link_filename: file name for links.
             shape_filename: file name for shapes.
            crs: coordinate reference system. Defaults to value in CRS.
-            node_foreign_key: variable linking the node table to the link table. Defaults 
+            node_foreign_key: variable linking the node table to the link table. Defaults
                 to NODE_FOREIGN_KEY.
-            validate_schema: boolean indicating if network should be validated to schema. 
+            validate_schema: boolean indicating if network should be validated to schema.
 
        returns: tuple of GeoDataFrames nodes_df, links_df, shapes_df
        """
@@ -381,12 +369,12 @@ def load_transform_network(
             [g["properties"] for g in node_geojson["features"]]
         )
 
-        if node_foreign_key not in node_properties_df.columns:
-            raise ValueError("Specified `node_foreign_key`: {} not found in {}. Available properties: {}".format(
-                node_foreign_key,
-                node_filename,
-                node_properties_df.columns
-            ))
+        if node_foreign_key not in list(node_properties_df.columns):
+            raise ValueError(
+                "Specified `node_foreign_key`: {} not found in {}. Available properties: {}".format(
+                    node_foreign_key, node_filename, node_properties_df.columns
+                )
+            )
 
         node_geometries = [
             Point(g["geometry"]["coordinates"]) for g in node_geojson["features"]
@@ -411,7 +399,6 @@ def load_transform_network(
 
         return nodes_df, links_df, shapes_df
 
-
     def write(self, path: str = ".", filename: str = None) -> None:
         """
         Writes a network in the roadway network standard
@@ -776,8 +763,8 @@ def ox_graph(
         Args:
             nodes_df : GeoDataFrame of nodes
             link_df : GeoDataFrame of links
-            node_foreign_key: field referenced in `link_foreign_key` 
-            link_foreign_key: list of attributes that define the link start and end nodes to the node foreign key 
+            node_foreign_key: field referenced in `link_foreign_key`
+            link_foreign_key: list of attributes that define the link start and end nodes to the node foreign key
             unique_link_key: primary key for links
 
         Returns: a networkx multidigraph
@@ -827,13 +814,11 @@ def selection_has_unique_link_id(
                 of roadway features, containing a "link" key.
 
         Returns: A boolean indicating if the selection dictionary contains
-            a unique identifier for links. 
+            a unique identifier for links.
         """
         selection_keys = [k for l in selection_dict["link"] for k, v in l.items()]
-        return bool(
-            set(self.unique_link_ids) & set(selection_keys)
-        )
+        return bool(set(self.unique_link_ids) & set(selection_keys))
 
     def build_selection_key(self, selection_dict: dict) -> tuple:
         """
@@ -861,8 +846,7 @@ def build_selection_key(
     def _get_fk_nodes(
         _links: gpd.GeoDataFrame, link_foreign_key: list = LINK_FOREIGN_KEY
     ):
-        """Find the nodes for the candidate links.
-        """
+        """Find the nodes for the candidate links."""
         _n = list(set([i for fk in link_foreign_key for i in list(_links[fk])]))
         # WranglerLogger.debug("Node foreign key list: {}".format(_n))
         return _n
@@ -970,7 +954,7 @@ def path_search(
             D_id: destination node foreign key ID
             weight_column: column to use for weight of shortest path.
Defaults to "i" (iteration) weight_factor: optional weight to multiply the weight column by when finding the shortest path - search_breadth: + search_breadth: Returns @@ -1065,7 +1049,10 @@ def _add_breadth( ) i += 1 candidate_links_df, node_list_foreign_keys = _add_breadth( - candidate_links_df, self.nodes_df, self.links_df, i=i, + candidate_links_df, + self.nodes_df, + self.links_df, + i=i, ) # ----------------------------------- # Once have A and B in graph, @@ -1117,11 +1104,11 @@ def _add_breadth( raise NoPathFound(msg) def select_roadway_features( - self, - selection: dict, - search_mode="drive", + self, + selection: dict, + search_mode="drive", force_search=False, - sp_weight_factor = None, + sp_weight_factor=None, ) -> GeoDataFrame: """ Selects roadway features that satisfy selection criteria @@ -1145,11 +1132,11 @@ def select_roadway_features( B - to node link - which includes at least a variable for `name` search_mode: mode which you are searching for; defaults to "drive" - force_search: boolean directing method to perform search even if one + force_search: boolean directing method to perform search even if one with same selection dict is stored from a previous search. - sp_weight_factor: multiple used to discourage shortest paths which - meander from original search returned from name or ref query. - If not set here, will default to value of sp_weight_factor in + sp_weight_factor: multiple used to discourage shortest paths which + meander from original search returned from name or ref query. + If not set here, will default to value of sp_weight_factor in RoadwayNetwork instance. If not set there, will defaul to SP_WEIGHT_FACTOR. Returns: a list of link IDs in selection @@ -1178,7 +1165,9 @@ def select_roadway_features( self.selections[sel_key] = {} self.selections[sel_key]["selection_found"] = False - unique_link_identifer_in_selection = self.selection_has_unique_link_id(selection) + unique_link_identifer_in_selection = self.selection_has_unique_link_id( + selection + ) if not unique_link_identifer_in_selection: A_id, B_id = self.orig_dest_nodes_foreign_key(selection) @@ -1587,7 +1576,7 @@ def apply_managed_lane_feature_change( self.links_df.at[idx, attribute] = attr_value else: - if i == 1: + if p == 1: updated_network = copy.deepcopy(self) if attribute in self.links_df.columns and not isinstance( @@ -1840,7 +1829,7 @@ def get_property_by_time_period_and_group( list of group categories in order of search, i.e. ["hov3","hov2"] - default_return: what to return if variable or time period not found. Default is None. + default_return: what to return if variable or time period not found. Default is None. 
        returns
        --------
 
@@ -1848,8 +1837,12 @@ def get_property_by_time_period_and_group(
         """
 
         if prop not in list(self.links_df.columns):
-            WranglerLogger.warning("Property {} not in links to split, returning as default value: {}".format(prop, default_value))
-            return pd.Series([default_return]*len(self.link_df))
+            WranglerLogger.warning(
+                "Property {} not in links to split, returning as default value: {}".format(
+                    prop, default_return
+                )
+            )
+            return pd.Series([default_return] * len(self.links_df))
 
         def _get_property(
             v,
@@ -1885,7 +1878,7 @@ def _get_property(
                 if "default" in v.keys():
                     return v["default"]
                 else:
-                    WranglerLogger.debug("variable: ".format(v))
+                    WranglerLogger.debug("variable: {}".format(v))
                     msg = "Variable {} is more complex in network than query".format(v)
                     WranglerLogger.error(msg)
                     raise ValueError(msg)
@@ -2004,26 +1997,26 @@ def _get_property(
         )
 
     def update_distance(
-        self, 
-        links_df: GeoDataFrame = None, 
+        self,
+        links_df: GeoDataFrame = None,
         use_shapes: bool = False,
         units: str = "miles",
-        network_variable: str = "distance", 
+        network_variable: str = "distance",
         overwrite: bool = True,
-        inplace = True
+        inplace=True,
     ):
         """
         Calculate link distance in specified units to network variable using either straight line
-        distance or (if specified) shape distance if available. 
+        distance or (if specified) shape distance if available.
 
         Args:
-            links_df: Links GeoDataFrame. Useful if want to update a portion of network links 
-                (i.e. only centroid connectors). If not provided, will use entire self.links_df.
-            use_shapes: if True, will add length information from self.shapes_df rather than crow-fly. 
-                If no corresponding shape found in self.shapes_df, will default to crow-fly.
-            units: units to use. Defaults to the standard unit of miles. Available units: "meters", "miles". 
-            network_variable: variable to store link distance in. Defaults to "distance". 
-            overwrite: Defaults to True and will overwrite all existing calculated distances. 
+            links_df: Links GeoDataFrame. Useful if want to update a portion of network links
+                (i.e. only centroid connectors). If not provided, will use entire self.links_df.
+            use_shapes: if True, will add length information from self.shapes_df rather than crow-fly.
+                If no corresponding shape found in self.shapes_df, will default to crow-fly.
+            units: units to use. Defaults to the standard unit of miles. Available units: "meters", "miles".
+            network_variable: variable to store link distance in. Defaults to "distance".
+            overwrite: Defaults to True and will overwrite all existing calculated distances.
                 False will only update NaNs.
             inplace: updates self.links_df
 
        Returns:
            links_df with updated distance
        """
 
-        if units not in ["miles","meters"]:
+        if units not in ["miles", "meters"]:
             raise NotImplementedError
 
         if links_df is None:
             links_df = self.links_df.copy()
 
         msg = "Update distance in {} to variable: {}".format(units, network_variable)
-        if overwrite: msg + "\n - overwriting existing calculated values if found."
-        if use_shapes: msg + "\n - using shapes_df length if found."
+        if overwrite:
+            msg += "\n - overwriting existing calculated values if found."
+        if use_shapes:
+            msg += "\n - using shapes_df length if found."
        WranglerLogger.debug(msg)
 
        """
@@ -2047,26 +2042,31 @@ def update_distance(
        """
 
         temp_links_gdf = links_df.copy()
-        temp_links_gdf.crs = "EPSG:4326" 
-        temp_links_gdf = temp_links_gdf.to_crs(epsg=26915) #in meters
+        temp_links_gdf.crs = "EPSG:4326"
+        temp_links_gdf = temp_links_gdf.to_crs(epsg=26915)  # in meters
 
-        conversion_from_meters = {"miles": 1/1609.34, "meters": 1}
-        temp_links_gdf[network_variable] = temp_links_gdf.geometry.length * conversion_from_meters[units]
+        conversion_from_meters = {"miles": 1 / 1609.34, "meters": 1}
+        temp_links_gdf[network_variable] = (
+            temp_links_gdf.geometry.length * conversion_from_meters[units]
+        )
 
         if use_shapes:
             _needed_shapes_gdf = self.shapes_df.loc[
-                self.shapes_df[self.shape_foreign_key] in links_df[self.shape_foreign_key]
+                self.shapes_df[self.shape_foreign_key].isin(
+                    links_df[self.shape_foreign_key]
+                )
             ].copy()
 
             _needed_shapes_gdf = _needed_shapes_gdf.to_crs(epsg=26915)
-            _needed_shapes_gdf[network_variable] = _needed_shapes_gdf.geometry.length * conversion_from_meters[units]
+            _needed_shapes_gdf[network_variable] = (
+                _needed_shapes_gdf.geometry.length * conversion_from_meters[units]
+            )
 
             temp_links_gdf = update_df(
                 temp_links_gdf,
                 _needed_shapes_gdf,
-                merge_key = self.shape_foreign_key,
-                update_fields = [network_variable],
-                method = "update if found",
+                merge_key=self.shape_foreign_key,
+                update_fields=[network_variable],
+                method="update if found",
             )
 
         if overwrite:
@@ -2075,17 +2075,18 @@ def update_distance(
             links_df = update_df(
                 links_df,
                 temp_links_gdf,
-                merge_key = self.unique_link_key,
-                update_fields = [network_variable],
-                method = "update nan",
+                merge_key=self.unique_link_key,
+                update_fields=[network_variable],
+                method="update nan",
             )
 
         if inplace:
             self.links_df = links_df
         else:
             return links_df
-    
+
     def create_dummy_connector_links(
+        self,
         gp_df: GeoDataFrame,
         ml_df: GeoDataFrame,
         access_lanes: int = 1,
@@ -2195,13 +2196,21 @@ def _get_connector_references(ref_1: list, ref_2: list, type: str):
 
     @staticmethod
     def has_managed_lanes(links_df: GeoDataFrame) -> bool:
-        ml_lanes_attributes = [i for i in self.links_df.columns.values.tolist() if i.lower().startswith("ml_lanes")]
+        ml_lanes_attributes = [
+            i
+            for i in links_df.columns.values.tolist()
+            if i.lower().startswith("ml_lanes")
+        ]
         if not ml_lanes_attributes:
-            WranglerLogger.info("No managed lanes attributes found when calculating managed lane network. Returning original network.")
+            WranglerLogger.info("No managed lanes attributes found.")
             return False
 
-        if not self.links_df[ml_lanes_attributes].max()>0:
-            WranglerLogger.info("Max number of managed lanes is not greater than zero ({}). Returning original network.".format(self.links_df[ml_lanes_attributes].max()))
+        if not links_df[ml_lanes_attributes].max().max() > 0:
+            WranglerLogger.info(
+                "Max number of managed lanes is not greater than zero ({}).".format(
+                    links_df[ml_lanes_attributes].max().max()
+                )
+            )
             return False
 
         return True
@@ -2213,7 +2222,7 @@ def create_managed_lane_network(
         managed_lanes_node_id_scalar: int = None,
         managed_lanes_link_id_scalar: int = None,
         in_place: bool = False,
-    ) -> Union[None,Collection[GeoDataFrame]:
+    ) -> Union[None, Collection[GeoDataFrame]]:
         """
         Create a roadway network with managed lanes links separated out.
         Add new parallel managed lane links, access/egress links,
@@ -2234,19 +2243,19 @@ def create_managed_lane_network(
                 managed lane unique ids. If not specified, will look for value in the RoadwayNetwork instance.
                If not found there, will default to MANAGED_LANES_LINK_ID_SCALAR.
-            in_place: If True, will update self.links_model_df, self.shapes_df, and self.nodes_model_df. 
-                Otherwise, will return a tuple with self.links_model_df, self.shapes_df, and self.nodes_model_df. 
+            in_place: If True, will update self.model_links_df, self.shapes_df, and self.model_nodes_df.
+                Otherwise, will return a tuple with self.model_links_df, self.shapes_df, and self.model_nodes_df.
 
-        returns: 
+        returns:
             None if `in_place` True.
-            A Tuple of self.model_links_df, self.shapes_df, and self.model_nodes_df if `in_place` is False. 
+            A Tuple of self.model_links_df, self.shapes_df, and self.model_nodes_df if `in_place` is False.
 
         .. todo:: make this a more rigorous test
         """
 
        # assess first if you need to run the rest of this by identifying if any managed lane attributes exist
 
         if not RoadwayNetwork.has_managed_lanes(self.links_df):
             if in_place:
-                return 
+                return
             else:
                 return self
 
@@ -2254,22 +2263,30 @@
         # identify parameters to use
 
         if not keep_same_attributes_ml_and_gp:
-            keep_same_attributes_ml_and_gp = self.__dict__.get("keep_same_attributes_ml_and_gp")
+            keep_same_attributes_ml_and_gp = self.__dict__.get(
+                "keep_same_attributes_ml_and_gp"
+            )
         if not keep_same_attributes_ml_and_gp:
             keep_same_attributes_ml_and_gp = KEEP_SAME_ATTRIBUTES_ML_AND_GP
 
         if not managed_lanes_required_attributes:
-            managed_lanes_required_attributes = self.__dict__.get("managed_lanes_required_attributes")
+            managed_lanes_required_attributes = self.__dict__.get(
+                "managed_lanes_required_attributes"
+            )
         if not managed_lanes_required_attributes:
             managed_lanes_required_attributes = MANAGED_LANES_REQUIRED_ATTRIBUTES
 
         if not managed_lanes_node_id_scalar:
-            managed_lanes_node_id_scalar = self.__dict__.get("managed_lanes_node_id_scalar")
+            managed_lanes_node_id_scalar = self.__dict__.get(
+                "managed_lanes_node_id_scalar"
+            )
         if not managed_lanes_node_id_scalar:
             managed_lanes_node_id_scalar = MANAGED_LANES_NODE_ID_SCALAR
 
         if not managed_lanes_link_id_scalar:
-            managed_lanes_link_id_scalar = self.__dict__.get("managed_lanes_link_id_scalar")
+            managed_lanes_link_id_scalar = self.__dict__.get(
+                "managed_lanes_link_id_scalar"
+            )
         if not managed_lanes_link_id_scalar:
             managed_lanes_link_id_scalar = MANAGED_LANES_LINK_ID_SCALAR
 
@@ -2324,7 +2341,7 @@ def create_managed_lane_network(
             lambda x: create_unique_shape_id(x)
         )
 
-        access_links_df, egress_links_df = RoadwayNetwork.create_dummy_connector_links(
+        access_links_df, egress_links_df = self.create_dummy_connector_links(
             gp_links_df, ml_links_df
         )
         access_links_df["geometry"] = access_links_df["locationReferences"].apply(
@@ -2347,10 +2364,7 @@ def create_managed_lane_network(
 
         # drop the duplicate links, if any
         # could happen when a new egress/access link gets created which already exists
-        out_links_df = out_links_df.drop_duplicates(
-            subset=["A", "B"],
-            keep="last"
-        )
+        out_links_df = out_links_df.drop_duplicates(subset=["A", "B"], keep="last")
 
         # only the ml_links_df could potentially have the new added nodes
         added_a_nodes = ml_links_df["A"]
@@ -2408,8 +2422,7 @@ def create_managed_lane_network(
         out_shapes_df = out_shapes_df.append(new_shapes_df)
 
         out_shapes_df = out_shapes_df.drop_duplicates(
-            subset=self.shape_foreign_key,
-            keep="first"
+            subset=self.shape_foreign_key, keep="first"
         )
 
         out_links_df = out_links_df.reset_index()
@@ -2418,16 +2431,16 @@ def create_managed_lane_network(
 
         if in_place:
             self.model_links_df = out_links_df
-            self.nodes_df = out_nodes_df 
+            self.nodes_df = out_nodes_df
             self.shapes_df = out_shapes_df
         else:
             return out_links_df, out_nodes_df, out_shapes_df
 
     @staticmethod
     def get_modal_links_nodes(
-        links_df: DataFrame, 
-        nodes_df: DataFrame, 
-        modes: list[str] = None, 
+        links_df: DataFrame,
+        nodes_df: DataFrame,
+        modes: Collection[str] = None,
         modes_to_network_link_variables: dict = MODES_TO_NETWORK_LINK_VARIABLES,
-    ) -> tuple(DataFrame, DataFrame):
+    ) -> Tuple[DataFrame, DataFrame]:
         """Returns nodes and link dataframes for specific mode.
 
         Args:
             links_df: DataFrame of standard network links
             nodes_df: DataFrame of standard network nodes
             modes: list of the modes of the network to be kept, must be in `drive`,`transit`,`rail`,`bus`,
                 `walk`, `bike`. For example, if bike and walk are selected, both bike and walk links will be kept.
-            modes_to_network_link_variables: dictionary mapping the mode selections to the network variables 
+            modes_to_network_link_variables: dictionary mapping the mode selections to the network variables
                 that must evaluate to True to select that mode. Defaults to MODES_TO_NETWORK_LINK_VARIABLES
 
         Returns: tuple of DataFrames for links, nodes filtered by mode
 
@@ -2450,7 +2463,8 @@ def get_modal_links_nodes(
         for mode in modes:
             if mode not in modes_to_network_link_variables.keys():
                 msg = "mode value should be one of {}, got {}".format(
-                    list(modes_to_network_link_variables.keys()), mode,
+                    list(modes_to_network_link_variables.keys()),
+                    mode,
                 )
                 WranglerLogger.error(msg)
                 raise ValueError(msg)
@@ -2498,8 +2512,8 @@ def get_modal_links_nodes(
 
     @staticmethod
     def get_modal_graph(
-        links_df: DataFrame, 
-        nodes_df: DataFrame, 
+        links_df: DataFrame,
+        nodes_df: DataFrame,
         mode: str = None,
         modes_to_network_link_variables: dict = MODES_TO_NETWORK_LINK_VARIABLES,
     ):
@@ -2511,7 +2525,7 @@ def get_modal_graph(
             nodes_df: DataFrame of standard network nodes
             mode: mode of the network, one of `drive`,`transit`,
                 `walk`, `bike`
-            modes_to_network_link_variables: dictionary mapping the mode selections to the network variables 
+            modes_to_network_link_variables: dictionary mapping the mode selections to the network variables
                 that must evaluate to True to select that mode. Defaults to MODES_TO_NETWORK_LINK_VARIABLES
 
         Returns: networkx: osmnx: DiGraph of network
@@ -2524,7 +2538,9 @@ def get_modal_graph(
             raise ValueError(msg)
 
         _links_df, _nodes_df = RoadwayNetwork.get_modal_links_nodes(
-            links_df, nodes_df, modes=[mode],
+            links_df,
+            nodes_df,
+            modes=[mode],
         )
         G = RoadwayNetwork.ox_graph(_nodes_df, _links_df)
 
@@ -2553,7 +2569,9 @@ def is_network_connected(
 
         if mode:
             _links_df, _nodes_df = RoadwayNetwork.get_modal_links_nodes(
-                _links_df, _nodes_df, modes=[mode],
+                _links_df,
+                _nodes_df,
+                modes=[mode],
             )
         else:
             WranglerLogger.info(
@@ -2575,7 +2593,7 @@ def add_incident_link_data_to_nodes(
         links_df: DataFrame = None,
         nodes_df: DataFrame = None,
         link_variables: list = [],
-        unique_node_key = UNIQUE_NODE_KEY,
+        unique_node_key=UNIQUE_NODE_KEY,
     ) -> DataFrame:
         """
         Add data from links going to/from nodes to node.
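For orientation, a sketch of how the mode-filtering helper reformatted in the hunks above and below is typically called — a hedged example, not part of the patch; assumes `net` is an already-loaded RoadwayNetwork:

    drive_links_df, drive_nodes_df = RoadwayNetwork.get_modal_links_nodes(
        net.links_df,
        net.nodes_df,
        modes=["drive"],  # each mode must be a key of MODES_TO_NETWORK_LINK_VARIABLES
    )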
@@ -2646,7 +2664,9 @@ def identify_segment_endpoints( if mode: _links_df, _nodes_df = RoadwayNetwork.get_modal_links_nodes( - _links_df, _nodes_df, modes=[mode], + _links_df, + _nodes_df, + modes=[mode], ) else: WranglerLogger.warning( @@ -2784,9 +2804,7 @@ def identify_segment_endpoints( WranglerLogger.debug( "{} Segments with at least nodes:\n{}".format( len(_nodes_df), - _nodes_df[ - [self.unique_node_key, "name", "ref", "segment_id"] - ], + _nodes_df[[self.unique_node_key, "name", "ref", "segment_id"]], ) ) @@ -2858,7 +2876,9 @@ def identify_segment( if mode: _links_df, _nodes_df = RoadwayNetwork.get_modal_links_nodes( - _links_df, _nodes_df, modes=[mode], + _links_df, + _nodes_df, + modes=[mode], ) else: WranglerLogger.warning( @@ -2935,7 +2955,9 @@ def assess_connectivity( if mode: _links_df, _nodes_df = RoadwayNetwork.get_modal_links_nodes( - _links_df, _nodes_df, modes=[mode], + _links_df, + _nodes_df, + modes=[mode], ) else: WranglerLogger.warning( @@ -2946,15 +2968,6 @@ def assess_connectivity( ) G = RoadwayNetwork.ox_graph(_nodes_df, _links_df) - # sub_graphs = [s for s in sorted(nx.strongly_connected_component_subgraphs(G), key=len, reverse=True)] - sub_graphs = [ - s - for s in sorted( - (G.subgraph(c) for c in nx.strongly_connected_components(G)), - key=len, - reverse=True, - ) - ] sub_graph_nodes = [ list(s) @@ -3042,13 +3055,7 @@ def selection_map( graph_links = self.links_df.loc[graph_link_idx] node_list_foreign_keys = list( - set( - [ - i - for fk in self.link_foreign_key - for i in list(graph_links[fk]) - ] - ) + set([i for fk in self.link_foreign_key for i in list(graph_links[fk])]) ) graph_nodes = self.nodes_df.loc[node_list_foreign_keys] @@ -3102,8 +3109,6 @@ def deletion_map(self, links: dict, nodes: dict): # deleted_links = None # deleted_nodes = None - missing_error_message = [] - if links is not None: for key, val in links.items(): deleted_links = self.links_df[self.links_df[key].isin(val)] @@ -3155,19 +3160,13 @@ def addition_map(self, links: dict, nodes: dict): if links is not None: link_ids = [] for link in links: - link_ids.append(link.get(RoadwayNetwork.UNIQUE_LINK_KEY)) + link_ids.append(link.get(self.unique_link_key)) added_links = self.links_df[ - self.links_df[RoadwayNetwork.UNIQUE_LINK_KEY].isin(link_ids) + self.links_df[self.unique_link_key].isin(link_ids) ] node_list_foreign_keys = list( - set( - [ - i - for fk in self.link_foreign_key - for i in list(added_links[fk]) - ] - ) + set([i for fk in self.link_foreign_key for i in list(added_links[fk])]) ) try: candidate_nodes = self.nodes_df.loc[node_list_foreign_keys] diff --git a/network_wrangler/scenario.py b/network_wrangler/scenario.py index d1a75645..22658916 100644 --- a/network_wrangler/scenario.py +++ b/network_wrangler/scenario.py @@ -614,7 +614,9 @@ def _summarize_add_roadway(change: dict, change_summary: dict): change_summary["added_links"] = pd.DataFrame(change.get("links")) change_summary["added_nodes"] = pd.DataFrame(change.get("nodes")) change_summary["map"] = RoadwayNetwork.addition_map( - self.road_net, change.get("links"), change.get("nodes"), + self.road_net, + change.get("links"), + change.get("nodes"), ) return change_summary diff --git a/network_wrangler/transitnetwork.py b/network_wrangler/transitnetwork.py index 11e116bd..1ad84b22 100644 --- a/network_wrangler/transitnetwork.py +++ b/network_wrangler/transitnetwork.py @@ -117,9 +117,9 @@ def read( Args: feed_path: where to read transit network files from. 
-        shapes_foreign_key: foreign key between shapes dataframe and roadway network nodes. Will default to SHAPES_FOREIGN_KEY if not provided. 
-        stops_foreign_key: foreign key between stops dataframe and roadway network nodes. Will defaul to STOPS_FOREIGN_KEY if not provided. 
-        id_scalar: scalar value added to create new stop and shape IDs when necessary. Will default to ID_SCALAR if not provided. 
+        shapes_foreign_key: foreign key between shapes dataframe and roadway network nodes. Will default to SHAPES_FOREIGN_KEY if not provided.
+        stops_foreign_key: foreign key between stops dataframe and roadway network nodes. Will default to STOPS_FOREIGN_KEY if not provided.
+        id_scalar: scalar value added to create new stop and shape IDs when necessary. Will default to ID_SCALAR if not provided.
 
     Returns: a TransitNetwork object.
     """
@@ -900,7 +900,12 @@ def _apply_transit_feature_change_routing(
                     WranglerLogger.error("Cannot create a unique new stop_id.")
                 stops.loc[
                     len(stops.index) + 1,
-                    ["stop_id", "stop_lat", "stop_lon", self.stops_foreign_key,],
+                    [
+                        "stop_id",
+                        "stop_lat",
+                        "stop_lon",
+                        self.stops_foreign_key,
+                    ],
                 ] = [
                     new_stop_id,
                     nodes_df.loc[int(fk_i), "Y"],
diff --git a/network_wrangler/utils.py b/network_wrangler/utils.py
index b8c62795..34020170 100644
--- a/network_wrangler/utils.py
+++ b/network_wrangler/utils.py
@@ -14,16 +14,15 @@
 from .logger import WranglerLogger
 
 
-def point_df_to_geojson(
-    df: pd.DataFrame, 
-    properties: list, 
-    node_foreign_key = None):
+def point_df_to_geojson(df: pd.DataFrame, properties: list, node_foreign_key=None):
     """
     Author: Geoff Boeing:
     https://geoffboeing.com/2015/10/exporting-python-data-geojson/
     """
     from .roadwaynetwork import NODE_FOREIGN_KEY
-    if not node_foreign_key: node_foreign_key = NODE_FOREIGN_KEY
+
+    if not node_foreign_key:
+        node_foreign_key = NODE_FOREIGN_KEY
 
     geojson = {"type": "FeatureCollection", "features": []}
     for _, row in df.iterrows():
@@ -41,7 +40,7 @@ def point_df_to_geojson(
 
 
 def link_df_to_json(df: pd.DataFrame, properties: list):
-    """ Export pandas dataframe as a json object.
+    """Export pandas dataframe as a json object.
 
     Modified from: Geoff Boeing:
     https://geoffboeing.com/2015/10/exporting-python-data-geojson/
@@ -223,7 +222,7 @@ def offset_location_reference(location_reference, offset_meters=10):
     return out_location_reference
 
 
-def haversine_distance(origin: list, destination: list, units = "miles"):
+def haversine_distance(origin: list, destination: list, units="miles"):
     """
     Calculates haversine distance between two points
 
@@ -246,7 +245,7 @@ def haversine_distance(origin: list, destination: list, units = "miles"):
     ) * math.cos(math.radians(lat2)) * math.sin(dlon / 2) * math.sin(dlon / 2)
     c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
 
-    d = {"meters": radius * c } # meters
+    d = {"meters": radius * c}  # meters
     d["miles"] = d["meters"] * 0.000621371  # miles
 
     return d[units]
@@ -296,12 +295,13 @@ def create_line_string(location_reference: list):
 
     return LineString([location_reference[0]["point"], location_reference[1]["point"]])
 
+
 def update_df(
     base_df: pd.DataFrame,
     update_df: pd.DataFrame,
     merge_key: str = None,
     left_on: str = None,
-    right_on: str = None, 
+    right_on: str = None,
     update_fields: Collection = None,
     method: str = "update if found",
 ):
@@ -312,8 +312,8 @@ def update_df(
         base_df: DataFrame to be updated
         update_df: DataFrame with updated values
         merge_key: column to merge on (i.e. model_link_id). If not specified, must have left_on AND right_on.
-        left_on: key for base_df. Must also specify right_on. 
-            If not specified, must specify merge_key.
-        right_on: key for update_df. Must also specify left_on. If not specified, must specify merge_key.
+        left_on: key for base_df. Must also specify right_on. If not specified, must specify merge_key.
+        right_on: key for update_df. Must also specify left_on. If not specified, must specify merge_key.
         update_fields: required list of fields to update values for. Must be columns in update_df.
         method: string indicating how the dataframe should be updated. One of:
             - "update if found" (default) which will update the values if the update values are not NaN
 
     Returns: Dataframe with updated values
     """
-    valid_methods = ["update if found", "overwrite all", "update nan"] 
+    valid_methods = ["update if found", "overwrite all", "update nan"]
 
     if method not in valid_methods:
-        raise ValueError("Specified 'method' was: {} but must be one of: {}".format(method, valid_methods))
-
-    if update_fields is None:
         raise ValueError(
-            "Must specify which fields to update, None specified."
+            "Specified 'method' was: {} but must be one of: {}".format(
+                method, valid_methods
+            )
         )
 
+    if update_fields is None:
+        raise ValueError("Must specify which fields to update, None specified.")
+
     if not set(update_fields).issubset(update_df.columns):
         raise ValueError(
             "Update fields: {} not in update_df: {}".format(
@@ -340,17 +342,23 @@ def update_df(
             )
 
     new_fields = [v for v in update_fields if v not in base_df.columns]
-    update_fields = list(set(update_fields)-set(new_fields))
+    update_fields = list(set(update_fields) - set(new_fields))
 
-    if new_fields: 
+    if new_fields:
         WranglerLogger.debug(
-            "Some update fields: {} not in base_df; adding then as new columns.".format(new_fields)
+            "Some update fields: {} not in base_df; adding them as new columns.".format(
+                new_fields
+            )
         )
-    
+
     if merge_key and left_on or merge_key and right_on:
-        raise ValueError("Only need a merge_key or right_on and left_on but both specified")
+        raise ValueError(
+            "Only need a merge_key or right_on and left_on but both specified"
+        )
     if not merge_key and not (left_on and right_on):
-        raise ValueError("Need either a merge_key or right_on and left_on but neither fully specified")
+        raise ValueError(
+            "Need either a merge_key or right_on and left_on but neither fully specified"
+        )
 
     if merge_key:
         left_on = merge_key
@@ -377,24 +385,28 @@ def update_df(
     merged_df = base_df.merge(
         update_df[update_fields + [(right_on)]],
         left_on=left_on,
-        right_on= right_on,
+        right_on=right_on,
         how="left",
         suffixes=suffixes,
     )
     # print("merged_df:\n",merged_df)
 
     if method == "overwrite all":
-        merged_df = merged_df.drop(columns=[c + "-orig" for c in update_fields if c + "-orig" in merged_df.columns])
+        merged_df = merged_df.drop(
+            columns=[
+                c + "-orig" for c in update_fields if c + "-orig" in merged_df.columns
+            ]
+        )
         merged_df = merged_df[base_df.columns.tolist()]
 
     elif method == "update if found":
-        #overwrite if the updated field is not Nan
+        # overwrite if the updated field is not NaN
        for c in update_fields:
-            # selects rows where updated value is not NA; 
+            # selects rows where updated value is not NA
             merged_df.loc[~merged_df[c + "-update"].isna(), c] = merged_df.loc[
                 ~merged_df[c + "-update"].isna(), c + "-update"
             ]
        merged_df = merged_df.drop(columns=[c + "-update" for c in update_fields])
 
    elif method == "update nan":
-        #overwrite if the base field IS Nan
+        # overwrite if the base field IS NaN
        for c in update_fields:
            # print(merged_df.apply(lambda row: row[c+"-update"] if not row[c] else
row[c],axis=1)) merged_df.loc[merged_df[c].isna(), c] = merged_df.loc[ @@ -405,9 +417,9 @@ def update_df( if new_fields: merged_df = merged_df.merge( - update_df[new_fields+ [(right_on)]], + update_df[new_fields + [(right_on)]], left_on=left_on, - right_on= right_on, + right_on=right_on, how="left", ) return merged_df diff --git a/tests/test_notebooks.py b/tests/test_notebooks.py index 6f69341c..51af03a7 100644 --- a/tests/test_notebooks.py +++ b/tests/test_notebooks.py @@ -9,14 +9,18 @@ os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "notebook" ) + @pytest.mark.notebooks def test_notebooks(): - fixture = NBRegressionFixture(exec_timeout=50, diff_ignore=( - '/cells/*/execution_count', - '/metadata/language_info/version', - '/cells/*/outputs') - ) + fixture = NBRegressionFixture( + exec_timeout=50, + diff_ignore=( + "/cells/*/execution_count", + "/metadata/language_info/version", + "/cells/*/outputs", + ), + ) - for file in glob.glob(os.path.join(NOTEBOOK_DIR,"*.ipynb")): + for file in glob.glob(os.path.join(NOTEBOOK_DIR, "*.ipynb")): print(file) fixture.check(str(file)) diff --git a/tests/test_roadway.py b/tests/test_roadway.py index 26b89c97..782fa9c9 100644 --- a/tests/test_roadway.py +++ b/tests/test_roadway.py @@ -43,7 +43,7 @@ def _read_small_net(): node_filename=SMALL_NODE_FILE, shape_filename=SMALL_SHAPE_FILE, fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) return net @@ -54,7 +54,7 @@ def _read_stpaul_net(): node_filename=STPAUL_NODE_FILE, shape_filename=STPAUL_SHAPE_FILE, fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) return net @@ -76,7 +76,7 @@ def test_roadway_read_write(request): node_filename=SMALL_NODE_FILE, shape_filename=SMALL_SHAPE_FILE, fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) time1 = time.time() print("Writing to: {}".format(SCRATCH_DIR)) @@ -86,7 +86,7 @@ def test_roadway_read_write(request): link_filename=out_link_file, node_filename=out_node_file, shape_filename=out_shape_file, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) time3 = time.time() @@ -122,14 +122,14 @@ def test_quick_roadway_read_write(request): node_filename=SMALL_NODE_FILE, shape_filename=SMALL_SHAPE_FILE, fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) net.write(filename=out_prefix, path=SCRATCH_DIR) net_2 = RoadwayNetwork.read( link_filename=out_link_file, node_filename=out_node_file, shape_filename=out_shape_file, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) print("--Finished:", request.node.name) @@ -344,6 +344,7 @@ def test_managed_lane_change_functionality(request): print("--Finished:", request.node.name) + @pytest.mark.roadway @pytest.mark.travis def test_add_adhoc_field(request): @@ -682,6 +683,7 @@ def test_query_roadway_property_by_time_group(request, variable_query): ## todo make test make sure the values are correct. 
+ @pytest.mark.roadway @pytest.mark.travis def test_write_model_net(request): @@ -708,6 +710,7 @@ def test_write_model_net(request): print("--Finished:", request.node.name) + @pytest.mark.roadway @pytest.mark.travis def test_network_connectivity(request): @@ -720,6 +723,7 @@ def test_network_connectivity(request): print("Drive Network Connected:", net.is_network_connected(mode="drive")) print("--Finished:", request.node.name) + @pytest.mark.roadway @pytest.mark.travis def test_get_modal_network(request): @@ -730,7 +734,9 @@ def test_get_modal_network(request): net = _read_stpaul_net() _links_df, _nodes_df = RoadwayNetwork.get_modal_links_nodes( - net.links_df, net.nodes_df, modes=[mode], + net.links_df, + net.nodes_df, + modes=[mode], ) test_links_of_selection = _links_df["model_link_id"].tolist() @@ -752,6 +758,7 @@ def test_get_modal_network(request): assert set(test_links_of_selection) == set(control_links_of_selection) + @pytest.mark.roadway @pytest.mark.travis def test_network_connectivity_ignore_single_nodes(request): @@ -765,7 +772,8 @@ def test_network_connectivity_ignore_single_nodes(request): print("{} Disconnected Subnetworks:".format(len(disconnected_nodes))) print("-->\n{}".format("\n".join(list(map(str, disconnected_nodes))))) print("--Finished:", request.node.name) - #TODO #240 + # TODO #240 + @pytest.mark.roadway @pytest.mark.travis @@ -917,6 +925,7 @@ def test_add_roadway_shape(request): print("--Finished:", request.node.name) + @pytest.mark.travis @pytest.mark.roadway def test_create_ml_network_shape(request): @@ -992,7 +1001,7 @@ def test_apply_pycode_roadway(request): "AFTER CHANGE...\n", net.links_df.loc[net.links_df["lanes"] == 12, ["model_link_id", "lanes"]], ) - #TODO #241 + # TODO #241 @pytest.mark.travis @@ -1040,7 +1049,8 @@ def test_find_segment(request): sel_dict = {"name": "North Mounds Boulevard", "ref": "US 61"} seg_df = net.identify_segment(seg_ends[0], seg_ends[1], selection_dict=sel_dict) print(seg_df) - #TODO #242 + # TODO #242 + @pytest.mark.travis @pytest.mark.roadway @@ -1063,9 +1073,7 @@ def test_duplicates_in_ml_network(request): shapes_df = ml_net.shapes_df nodes_df = ml_net.nodes_df - duplicate_links_df = links_df[ - links_df.duplicated(subset=["A", "B"], keep="first") - ] + duplicate_links_df = links_df[links_df.duplicated(subset=["A", "B"], keep="first")] duplicate_nodes_df = nodes_df[ nodes_df.duplicated(subset=["model_node_id"], keep="first") diff --git a/tests/test_scenario.py b/tests/test_scenario.py index 9a6731fc..664545db 100644 --- a/tests/test_scenario.py +++ b/tests/test_scenario.py @@ -8,6 +8,7 @@ from network_wrangler import Scenario from network_wrangler.logger import WranglerLogger from network_wrangler.roadwaynetwork import UNIQUE_LINK_IDS + """ Run just the tests labeled scenario using `pytest -v -m scenario` To run with print statments, use `pytest -s -m scenario` @@ -88,7 +89,7 @@ def test_scenario_conflicts(request): print("Conflict checks done:", scen.conflicts_checked) print("--Finished:", request.node.name) - #todo #243 + # todo #243 @pytest.mark.scenario @@ -126,7 +127,7 @@ def test_scenario_requisites(request): print("Requisite checks done:", scen.requisites_checked) print("--Finished:", request.node.name) - #todo #244 + # todo #244 @pytest.mark.scenario @@ -166,7 +167,7 @@ def test_project_sort(request): scen.order_project_cards() print("Ordered Projects:", scen.get_project_names()) print("--Finished:", request.node.name) - #todo #245 + # todo #245 @pytest.mark.roadway @@ -184,7 +185,8 @@ def 
test_managed_lane_project_card(request): print(project_card) print("--Finished:", request.node.name) - #todo #246 + # todo #246 + # selection, answer query_tests = [ @@ -270,6 +272,7 @@ def test_managed_lane_project_card(request): ), ] + @pytest.mark.parametrize("test_spec", query_tests) @pytest.mark.travis def test_query_builder(request, test_spec): @@ -287,6 +290,7 @@ def test_query_builder(request, test_spec): print("--Finished:", request.node.name) + @pytest.mark.scenario @pytest.mark.travis def test_apply_summary_wrappers(request): @@ -311,7 +315,7 @@ def test_apply_summary_wrappers(request): node_filename=STPAUL_NODE_FILE, shape_filename=STPAUL_SHAPE_FILE, fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ), "transit_net": TransitNetwork.read(STPAUL_DIR), } @@ -325,7 +329,7 @@ def test_apply_summary_wrappers(request): my_scenario.scenario_summary() print("--Finished:", request.node.name) - #todo #247 + # todo #247 @pytest.mark.scenario @@ -348,4 +352,4 @@ def test_scenario_building_from_script(request): p.communicate() # wait for the subprocess call to finish print("--Finished:", request.node.name) - #todo #248 + # todo #248 diff --git a/tests/test_transit.py b/tests/test_transit.py index 763b918a..ea1b3f93 100644 --- a/tests/test_transit.py +++ b/tests/test_transit.py @@ -25,7 +25,7 @@ def test_transit_read_write(request): print("Transit Write Directory:", SCRATCH_DIR) print("--Finished:", request.node.name) - #todo #249 + # todo #249 @pytest.mark.travis @@ -264,7 +264,7 @@ def test_wrong_existing(request): net.apply_transit_feature_change( selected_trips, [{"property": "headway_secs", "existing": 553, "set": 900}] ) - #todo #250 + # todo #250 print("--Finished:", request.node.name) @@ -281,7 +281,7 @@ def test_zero_valid_facilities(request): "time": ["06:00:00", "09:00:00"], } ) - #todo #251 + # todo #251 print("--Finished:", request.node.name) @@ -296,7 +296,8 @@ def test_invalid_selection_key(request): net.select_transit_features({"trip_ids": ["14941433-JUN19-MVS-BUS-Weekday-01"]}) print("--Finished:", request.node.name) - #todo #252 + # todo #252 + @pytest.mark.transit @pytest.mark.travis @@ -325,7 +326,8 @@ def test_invalid_optional_selection_variable(request): assert set(sel) == set(["14978409-JUN19-MVS-BUS-Weekday-01"]) print("--Finished:", request.node.name) - #todo #253 + # todo #253 + @pytest.mark.travis def test_transit_road_consistencies(request): @@ -341,7 +343,7 @@ def test_transit_road_consistencies(request): node_filename=STPAUL_NODE_FILE, shape_filename=STPAUL_SHAPE_FILE, fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) net.set_roadnet(road_net=road_net) @@ -349,7 +351,8 @@ def test_transit_road_consistencies(request): net.validate_road_network_consistencies() print(net.validated_road_network_consistency) print("--Finished:", request.node.name) - #todo #254 + # todo #254 + if __name__ == "__main__": test_transit_read_write() diff --git a/tests/test_transit_with_roadnet.py b/tests/test_transit_with_roadnet.py index f166a4bf..e5d5c09b 100644 --- a/tests/test_transit_with_roadnet.py +++ b/tests/test_transit_with_roadnet.py @@ -26,7 +26,7 @@ def test_set_roadnet(request): node_filename=os.path.join(STPAUL_DIR, "node.geojson"), shape_filename=os.path.join(STPAUL_DIR, "shape.geojson"), fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) transit_net = TransitNetwork.read(STPAUL_DIR) transit_net.set_roadnet(road_net) @@ -46,7 +46,7 @@ def test_project_card(request): 
node_filename=os.path.join(STPAUL_DIR, "node.geojson"), shape_filename=os.path.join(STPAUL_DIR, "shape.geojson"), fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) transit_net = TransitNetwork.read(STPAUL_DIR) transit_net.road_net = road_net @@ -126,7 +126,7 @@ def test_wo_existing(request): node_filename=os.path.join(STPAUL_DIR, "node.geojson"), shape_filename=os.path.join(STPAUL_DIR, "shape.geojson"), fast=True, - shape_foreign_key ='shape_id', + shape_foreign_key="shape_id", ) transit_net = TransitNetwork.read(STPAUL_DIR) transit_net.road_net = road_net @@ -167,6 +167,7 @@ def test_wo_existing(request): print("--Finished:", request.node.name) + @pytest.mark.roadway @pytest.mark.transit @pytest.mark.travis diff --git a/tests/test_utils.py b/tests/test_utils.py index c6d7dea8..2f263e3b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -6,9 +6,6 @@ from shapely.geometry import LineString - - - slug_test_list = [ {"text": "I am a roadway", "delim": "_", "answer": "i_am_a_roadway"}, {"text": "I'm a roadway", "delim": "_", "answer": "im_a_roadway"}, @@ -61,9 +58,9 @@ def test_time_convert(request): @pytest.mark.get_dist @pytest.mark.travis def test_get_distance_bw_lat_lon(request): - + print("\n--Starting:", request.node.name) - + from network_wrangler import haversine_distance start = [-93.0889873, 44.966861] @@ -116,7 +113,7 @@ def test_location_reference_offset(request): print("--Finished:", request.node.name) -update_test_list =[ +update_test_list = [ { "method": "update if found", "update_fields": ["cb"], @@ -127,7 +124,7 @@ def test_location_reference_offset(request): "cb": ["a", "bb", "cc", "dd"], "cc": [111, 222, 333, 444], } - ) + ), }, { "method": "update nan", @@ -139,7 +136,7 @@ def test_location_reference_offset(request): "cb": ["a", "bb", "c", "dd"], "cc": [111, 222, 333, 444], } - ) + ), }, { "method": "overwrite all", @@ -151,7 +148,7 @@ def test_location_reference_offset(request): "cb": [np.NaN, "bb", "cc", "dd"], "cc": [111, 222, 333, 444], } - ) + ), }, { "method": "update nan", @@ -164,10 +161,11 @@ def test_location_reference_offset(request): "cc": [111, 222, 333, 444], "zz": [np.NaN, "like", "ice", "cream."], } - ) + ), }, ] + @pytest.mark.update_df @pytest.mark.travis @pytest.mark.parametrize("update_test", update_test_list) @@ -196,7 +194,16 @@ def test_update_df(request, update_test): } ) - - result_df = update_df(df1, df2, "id", update_fields=update_test['update_fields'], method = update_test['method']) - print("UPDATE METHOD: {}\nResulting DF:\n{}\nExpected DF:\n{}".format(update_test['method'], result_df, update_test['expected_result'] )) - pd.testing.assert_frame_equal( update_test['expected_result'], result_df) + result_df = update_df( + df1, + df2, + "id", + update_fields=update_test["update_fields"], + method=update_test["method"], + ) + print( + "UPDATE METHOD: {}\nResulting DF:\n{}\nExpected DF:\n{}".format( + update_test["method"], result_df, update_test["expected_result"] + ) + ) + pd.testing.assert_frame_equal(update_test["expected_result"], result_df)
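To close, a self-contained sketch of the three update_df methods exercised by the parametrized tests above — toy data and the import path are assumptions, not part of the patch:

    import numpy as np
    import pandas as pd

    from network_wrangler.utils import update_df  # import path assumed

    base = pd.DataFrame({"id": [1, 2, 3], "val": [10.0, np.nan, 30.0]})
    updates = pd.DataFrame({"id": [2, 3], "val": [99.0, np.nan]})

    # "update if found": overwrite base values wherever the update value is not NaN.
    print(update_df(base, updates, "id", update_fields=["val"], method="update if found"))

    # "update nan": only fill base values that are currently NaN.
    print(update_df(base, updates, "id", update_fields=["val"], method="update nan"))

    # "overwrite all": take the update column wholesale (rows with no match become NaN).
    print(update_df(base, updates, "id", update_fields=["val"], method="overwrite all"))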