diff --git a/input/user_input_tests.xlsx b/input/user_input_tests.xlsx
index 11a8327e3..06a4247a0 100644
Binary files a/input/user_input_tests.xlsx and b/input/user_input_tests.xlsx differ
diff --git a/old/criticality/network_functions_v2.py b/old/criticality/network_functions_v2.py
index 7954e8eda..ab463b0c0 100644
--- a/old/criticality/network_functions_v2.py
+++ b/old/criticality/network_functions_v2.py
@@ -1636,7 +1636,7 @@ def add_od_nodes(graph, od, id_name, name=None, file_output=None, save_shp=False
         # Check which roads belong to the centroids closest vertices
         all_matches = [e for e in graph.edges(data=True, keys=True) if str(e[-1][id_name]) == od.iloc[i]['match_ids']]
         if len(all_matches) > 1:
-            all_matches = [am for am in all_matches if match_OD in [Point(p) for p in set(list(am[-1]['geometry'].coords))]]
+            all_matches = [am for am in all_matches if match_OD.coords[0] in [p for p in set(list(am[-1]['geometry'].coords))]]
         m = all_matches[0]

         if 'geometry' in m[-1]:
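The rewritten membership test compares the OD point's raw coordinate tuple against the edge geometry's coordinate tuples, the same comparison add_od_nodes in analyses_indirect.py switches to further down in this patch. A minimal sketch of why the tuple form is used (standalone, assuming only shapely):

    from shapely.geometry import Point

    coords = [(4.35, 52.01), (4.36, 52.02)]   # coordinates of an edge geometry
    match_OD = Point(4.35, 52.01)             # vertex closest to the OD point

    # Tuple comparison: exact and cheap.
    print(match_OD.coords[0] in coords)       # True

    # Note: wrapping the comprehension in parentheses *inside* a list, as in
    # [(Point(p) for p in coords)], builds a one-element list containing a
    # generator object, so a membership test against it is always False.
    print(match_OD in [(Point(p) for p in coords)])  # False

Comparing tuples also avoids depending on shapely's geometry-equality semantics for Point objects.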
diff --git a/ra2ce/analyses_indirect.py b/ra2ce/analyses_indirect.py
index e0655fed9..3970a8fbd 100644
--- a/ra2ce/analyses_indirect.py
+++ b/ra2ce/analyses_indirect.py
@@ -100,7 +100,7 @@ def multi_link_alternative_routes(G, InputDict, crs=4326):
     if 'shp_unique_ID' in InputDict:
         id_name = InputDict['shp_unique_ID']
     else:
-        id_name = 'osmid'
+        id_name = 'G_fid_simple'

     # initiate variables
     id_name_hazard = None
@@ -394,13 +394,13 @@ def multi_link_od_matrix(G, InputDict, crs=4326):

     # initiate variables
     id_name_hazard = None
-    weighing = 'time'  # TODO: make this variable
+    weighing = 'length'  # TODO: make this variable

     # load the input files if they are there
     if 'id_name' in InputDict:
         id_name = InputDict['id_name']
     else:
-        id_name = 'osmid'
+        id_name = 'G_fid_simple'

     if InputDict:
         # there is hazard data available
@@ -408,28 +408,54 @@ def multi_link_od_matrix(G, InputDict, crs=4326):
         id_name_hazard = InputDict['ID']

     # not all edges contain the attribute 'geometry' - because of geometry simplification these are streets that are straight and can be computed
-    # TODO check if this is necessary
-    G = add_missing_geoms_graph(G)
-
-    if (id_name_hazard is None) & (len(InputDict) != 0):
-        G = hazard_intersect_graph(G, InputDict['hazard_data'], InputDict['hazard_attribute_name'], InputDict['analysis_name'],
-                                   agg=InputDict['hazard_aggregation'])
+    G_hazard_path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard.gpickle'))
+    # Check for an existing G_hazard first (as in the ra2ce.py main script); the hazard intersect can take long.
+    if not G_hazard_path.exists():
+        print('G_hazard does not exist. Hazard intersect starts now')
+        # TODO check if this is necessary
+        G = add_missing_geoms_graph(G)
+
+        if (id_name_hazard is None) & (len(InputDict) != 0):
+            G = hazard_intersect_graph(G, InputDict['hazard_data'], InputDict['hazard_attribute_name'], InputDict['analysis_name'],
+                                       agg=InputDict['hazard_aggregation'])
+            nx.write_gpickle(G, G_hazard_path, protocol=4)
+            print(G_hazard_path, 'saved')
+    else:
+        print('G_hazard already exists, using the existing one!: {}'.format(G_hazard_path))
+        G = nx.read_gpickle(G_hazard_path)

     # Add the origin/destination nodes to the network
     ods = read_OD_files(InputDict['origin_shp'], InputDict['o_names'],
                         InputDict['destination_shp'], InputDict['d_names'],
                         InputDict['id_name_origin_destination'], crs)

-    ods = create_OD_pairs(ods, G, id_name)
-    G = add_od_nodes(G, ods, id_name, name=InputDict['analysis_name'], file_output=InputDict['output'], save_shp=True)
+    od_pairs_path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard_OD_pairs.shp'))
+
+    # Check for existing OD pairs first (as in the ra2ce.py main script); creating them can take long.
+    if not od_pairs_path.exists():
+        # TODO: after checking, set save_shp back to False
+        ods = create_OD_pairs(ods, G, id_name, InputDict=InputDict, save_shp=True, save_pickle=True)
+    else:
+        print('OD_pairs already exist, using the existing ones!: {}'.format(od_pairs_path))
+        ods = gpd.read_file(od_pairs_path)
+        ods.rename(columns={'geometry': 'OD'}, inplace=True)
+
+    od_graph_path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard_OD.gpickle'))
+    if not od_graph_path.exists():
+        # TODO: after checking, set save_shp back to False
+        G = add_od_nodes(G, ods, id_name, name=InputDict['analysis_name'], InputDict=InputDict, save_shp=True, save_pickle=True)
+    else:
+        print('G_hazard_OD already exists, using the existing one!: {}'.format(od_graph_path))
+        G = nx.read_gpickle(od_graph_path)

     if weighing == 'time':
         # not yet possible for input with shapefiles, except when a max speed attribute is attached to the shapefile
         # calculate the time it takes per road segment
-        avg_speeds = calc_avg_speed(G, 'highway', save_csv=True,
-                                    save_path=os.path.join(InputDict['output'], 'avg_speeds_{}.csv'.format(InputDict['analysis_name'])))
+        avg_speeds = calc_avg_speed(G, 'highway', save_csv=True, save_path=os.path.join(InputDict['output'], 'avg_speeds_{}.csv'.format(InputDict['analysis_name'])))
         avg_speeds = pd.read_csv(os.path.join(InputDict['output'], 'avg_speeds_{}.csv'.format(InputDict['analysis_name'])))
         if len(avg_speeds.loc[avg_speeds['avg_speed'] == 0]) > 0:
             logging.info("An average speed of 50 is used in locations where the maximum speed limit is 0 in OSM data.")
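The same compute-or-load pattern now guards three expensive steps (hazard intersect, OD-pair creation, OD-node insertion). A minimal sketch of the pattern factored into a reusable helper; the helper name and signature are hypothetical and not part of the patch:

    import networkx as nx
    from pathlib import Path

    def cached_graph(path: Path, compute):
        """Load a pickled graph if it exists, otherwise compute and cache it."""
        if path.exists():
            print('Using cached graph: {}'.format(path))
            return nx.read_gpickle(path)
        graph = compute()                       # the expensive step
        nx.write_gpickle(graph, path, protocol=4)
        print(path, 'saved')
        return graph

Usage would look like `G = cached_graph(G_hazard_path, lambda: hazard_intersect_graph(G, ...))`, which keeps the exists/read/write bookkeeping in one place and guarantees the cache file name used for reading matches the one used for writing.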
@@ -1261,7 +1287,7 @@ def read_OD_files(origin_paths, origin_names, destination_paths, destination_nam

     return od

-def create_OD_pairs(od, graph, id_name, name=None, file_output=None, save_shp=False, save_pickle=False):
+def create_OD_pairs(od, graph, id_name, name=None, InputDict=None, save_shp=False, save_pickle=False):
     """Gets the centroids of the selected NUTS-3 regions and the closest vertices on the roads of a graph.

     Args:
         origins [string]: file path of shapefile of the NUTS-3 regions in Europe
@@ -1295,13 +1321,20 @@ def create_OD_pairs(od, graph, id_name, name=None, InputDict=None, save_shp=Fa
         od = find_closest_vertice(od, idx, all_vertices, vertices_dict, edge_list, id_name)

     # save OD points
     if save_shp:
-        gdf_to_shp(od, os.path.join(file_output, name + "_od_pairs.shp"))
-        print("Saved OD pairs to shapefiles: {} and {}".format(os.path.join(file_output, name + "_od_pairs.shp")))
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard_OD_pairs.shp'))
+        gdf_to_shp(od, path)
+        print("\nThe shapefile with OD pairs can be found here:\n{}".format(path))
     if save_pickle:
-        pickle.dump(od, open(os.path.join(file_output, name + "_od_pairs.p"), 'wb'))
-        print("Saved OD pairs to pickles: {} and {}".format(os.path.join(file_output, name + "_od_pairs.p")))
-
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard_OD_pairs.gpickle'))
+        nx.write_gpickle(od, path, protocol=4)
+        print("\nThe pickle with OD pairs can be found here:\n{}".format(path))
     return od

@@ -1339,8 +1372,24 @@ def find_closest_vertice(origins_destinations, spatial_idx, search_vertices, ver

     return origins_destinations

-
-def add_od_nodes(graph, od, id_name, name=None, file_output=None, save_shp=False, save_pickle=False):
+def find_new_nearest_vertice(edge_list, graph, od, id_name, match_OD, i):
+    vertices_dict = {}
+    for line in edge_list:
+        vertices_dict[(line[0], line[1])] = [Point(p) for p in set(list(line[-1]['geometry'].coords))]
+    all_vertices = [p for sublist in list(vertices_dict.values()) for p in sublist]
+    # create an empty spatial index object to search in
+    idx = rtree.index.Index()
+    # populate the spatial index
+    for j, pnt in enumerate(all_vertices):
+        idx.insert(j, pnt.bounds)
+    # find the closest vertex and the line that the vertex lies on
+    target = list(idx.nearest(match_OD.coords[0]))
+    match_OD = [all_vertices[ip] for ip in target]
+    match_OD = match_OD[0]
+    all_matches = [am for am in edge_list if match_OD.coords[0] in [p for p in set(list(am[-1]['geometry'].coords))]]
+    return all_matches, match_OD
+
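find_new_nearest_vertice re-snaps an OD point when the edge it was originally matched to has been split or replaced: it indexes every geometry vertex of the candidate edges in an rtree and takes the nearest one. A self-contained sketch of that lookup, assuming only shapely and rtree:

    import rtree
    from shapely.geometry import Point

    def nearest_vertex(points, query):
        """Return the Point in `points` nearest to the Point `query`."""
        idx = rtree.index.Index()
        for j, pnt in enumerate(points):    # populate the spatial index
            idx.insert(j, pnt.bounds)       # a point's bounds is a degenerate box
        nearest_id = next(idx.nearest(query.coords[0]))
        return points[nearest_id]

    vertices = [Point(0, 0), Point(1, 1), Point(2, 0)]
    print(nearest_vertex(vertices, Point(0.9, 0.8)))  # POINT (1 1)

As written, only `edge_list` and `match_OD` drive the result; the `graph`, `od`, `id_name` and `i` parameters are accepted but unused, presumably kept for signature symmetry with find_closest_vertice.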
+def add_od_nodes(graph, od, id_name, name=None, InputDict=None, save_shp=False, save_pickle=False):
     """From a geodataframe of vertices on a graph, adds nodes on that graph.

     Args:
         graph [networkX graph]: graph of the roads of one or multiple European countries
@@ -1358,32 +1407,51 @@ def add_od_nodes(graph, od, id_name, name=None, file_output=None, save_shp=False

     # Check the highest node id, to add on that
     max_node_id = max([n for n in graph.nodes()])
+    print('adding OD nodes to graph...')
     for i in range(len(od.index)):
+        drawProgressBar(i / len(od.index))
         # the vertice on the edge that is closest to the origin/destination point
         match_OD = od.iloc[i]['OD']
-
         # Check which roads belong to the centroids closest vertices
         all_matches = [e for e in graph.edges(data=True, keys=True) if str(e[-1][id_name]) == od.iloc[i]['match_ids']]
         if len(all_matches) > 1:
-            all_matches = [am for am in all_matches if
-                           match_OD in [Point(p) for p in set(list(am[-1]['geometry'].coords))]]
+            all_matches = [am for am in all_matches if match_OD.coords[0] in [p for p in set(list(am[-1]['geometry'].coords))]]
+            if len(all_matches) == 0:  # find the nearest vertex anew when a new edge has already been created
+                # TODO: build this into find_closest_vertice as well
+                edge_list = [e for e in graph.edges(data=True, keys=True) if
+                             str(e[-1][id_name]) == od.iloc[i]['match_ids']]
+                all_matches, match_OD = find_new_nearest_vertice(edge_list, graph, od, id_name, match_OD, i)
+        if len(all_matches) == 1:
+            if [am for am in all_matches if match_OD.coords[0] in [p for p in set(list(am[-1]['geometry'].coords))]] == []:
+                edge_list = [e for e in graph.edges(data=True, keys=True) if
+                             str(e[-1][id_name]) == od.iloc[i]['match_ids']]
+                all_matches, match_OD = find_new_nearest_vertice(edge_list, graph, od, id_name, match_OD, i)
+        if len(all_matches) == 0:  # the edge does not exist anymore in the adjusted graph; search the full graph for the nearest vertex
+            edge_list = [e for e in graph.edges.data() if 'geometry' in e[-1]]
+            all_matches, match_OD = find_new_nearest_vertice(edge_list, graph, od, id_name, match_OD, i)
         m = all_matches[0]

         if 'geometry' in m[-1]:
             match_geom = m[-1]['geometry']
-            match_edge = m[:3]
+            if len(m) == 3:  # edges from graph.edges.data() lack the key; assume key 0
+                match_edge = m[0], m[1], 0
+            else:
+                match_edge = m[:3]
             match_name = od.iloc[i]['o_id']
+            if match_name == 'nan':
+                match_name = np.nan  # convert string 'nan' to np.nan to differentiate between origins and destinations in the next step
             if not match_name == match_name:
                 # match_name is nan, the point is not an origin but a destination
                 match_name = od.iloc[i]['d_id']
-
             new_lines = split_line_with_points(match_geom, [match_OD])
             if len(new_lines) == 2:
                 line1, line2 = new_lines
             else:
                 # if the vertice is at the end of the road; you don't have to add a new node
                 # but do add a new attribute to the node
+
                 if (graph.nodes[match_edge[0]]['geometry'].coords[0][1] == match_OD.coords[0][1]) & (
                         graph.nodes[match_edge[0]]['geometry'].coords[0][0] == match_OD.coords[0][0]):
                     if 'od_id' in graph.nodes[match_edge[0]]:
@@ -1397,8 +1465,12 @@ def add_od_nodes(graph, od, id_name, name=None, file_output=None, save_shp=False
                         graph.nodes[match_edge[1]]['od_id'] = graph.nodes[match_edge[1]]['od_id'] + ',' + match_name
                     else:
                         graph.nodes[match_edge[1]]['od_id'] = match_name
+                elif (graph.nodes[match_edge[0]]['geometry'].coords[0] != match_OD.coords[0]) & \
+                        (graph.nodes[match_edge[1]]['geometry'].coords[0] != match_OD.coords[0]):
+                    # the OD point matches neither end node of its edge
+                    print('OD point {} matches neither end node of its edge, skipping'.format(i))
+                    continue
                 continue
-
             new_node_id = max_node_id + 1
             max_node_id = new_node_id
@@ -1453,11 +1525,14 @@ def add_od_nodes(graph, od, id_name, name=None, file_output=None, save_shp=False
                 graph.remove_edge(u, v, k)

     if save_shp:
-        graph_to_shp(graph, os.path.join(file_output, '{}_OD_edges.shp'.format(name)),
-                     os.path.join(file_output, '{}_OD_nodes.shp'.format(name)))
+        path_edges = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard_OD_edges.shp'))
+        path_nodes = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard_OD_nodes.shp'))
+        graph_to_shp(graph, path_edges, path_nodes)
+        print("Saved graph to shapefile in {}".format(path_edges))
     if save_pickle:
-        nx.write_gpickle(graph, os.path.join(file_output, '{}_graph.gpickle'.format(name)))
-        print("Saved graph to pickle in {}".format(os.path.join(file_output, '{}_graph.gpickle'.format(name))))
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_hazard_OD.gpickle'))
+        nx.write_gpickle(graph, path, protocol=4)
+        print("Saved graph to pickle in {}".format(path))

     return graph
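split_line_with_points itself is not shown in this diff. Under the assumption that it behaves like shapely's standard split of a LineString at a Point that coincides with one of its vertices (which match_OD always is), this is the operation add_od_nodes relies on:

    from shapely.geometry import LineString, Point
    from shapely.ops import split

    line = LineString([(0, 0), (1, 1), (2, 2)])
    od_vertex = Point(1, 1)                  # match_OD is an existing vertex of the line
    line1, line2 = split(line, od_vertex).geoms
    print(line1)                             # LINESTRING (0 0, 1 1)
    print(line2)                             # LINESTRING (1 1, 2 2)

When the vertex sits at either end of the line, the split yields a single part, which is exactly the else-branch above: no new node is inserted, only an 'od_id' attribute on the existing end node.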
@@ -1479,8 +1554,9 @@ def preferred_routes_od(graph, weighing_name, idName, od, crs, hazard_data, shor
     pref_routes = gpd.GeoDataFrame(columns=['o_node', 'd_node', 'origin', 'destination', 'pref_path',
                                             weighing_name, 'match_ids', 'geometry'],
                                    geometry='geometry', crs={'init': 'epsg:{}'.format(crs)})
-
+    graph = graph.to_undirected()
     # create list of origin-destination pairs
+    od = od.replace('nan', np.nan)
     od_pairs = [(a, b) for a in od.loc[od['o_id'].notnull(), 'o_id'] for b in od.loc[od['d_id'].notnull(), 'd_id']]
     all_nodes = [(n, v['od_id']) for n, v in graph.nodes(data=True) if 'od_id' in v]
     od_nodes = []
@@ -1490,7 +1566,9 @@
             [(n, n_name) for n, n_name in all_nodes if (n_name == bb) | (bb in n_name)][0]))

     # create the routes between all OD pairs
+    i = 0
     for o, d in od_nodes:
+        drawProgressBar(i / len(od_nodes))
         if nx.has_path(graph, o[0], d[0]):
             # calculate the length of the preferred route
             pref_route = nx.dijkstra_path_length(graph, o[0], d[0], weight=weighing_name)
@@ -1519,6 +1597,7 @@
                                                'destination': d[1], 'pref_path': pref_nodes,
                                                weighing_name: pref_route, 'match_ids': match_list,
                                                'geometry': pref_edges}, ignore_index=True)
+        i += 1

     if shortest_route:
         pref_routes = pref_routes.loc[pref_routes.sort_values(weighing_name).groupby('o_node').head(3).index]
@@ -1586,12 +1665,9 @@ def calc_avg_speed(graph, road_type_col_name, save_csv=False, save_path=None, ex
     all_road_types = exceptions + types
     df = pd.DataFrame({'road_types': all_road_types, 'avg_speed': 0})
-
-
-
-
     # calculate average speed
     for i in range(len(df)):
+        print(i)
         roadtype = df.road_types[i]
         all_edges = [(u, v, edata['maxspeed'], edata['length']) for u, v, edata in graph.edges.data()
                      if (str(edata[road_type_col_name]) == roadtype) & ('maxspeed' in edata)]
@@ -1601,12 +1677,14 @@
             if isinstance(s, list):
                 ns = []
                 for ss in s:
-                    if not any(c.isalpha() for c in ss) and not (';' in ss) and not ('|' in ss):
+                    if not any(c.isalpha() for c in ss) and not (';' in ss) and not ('|' in ss) and not (',' in ss):
                         ns.append(int(ss))
                     elif not any(c.isalpha() for c in ss) and ';' in ss:
                         ns.extend([int(x) for x in ss.split(';') if x.isnumeric()])
                     elif not any(c.isalpha() for c in ss) and '|' in ss:
                         ns.extend([int(x) for x in ss.split('|') if x.isnumeric()])
+                    elif not any(c.isalpha() for c in ss) and ',' in ss:
+                        ns.extend([int(x) for x in ss.split(',') if x.isnumeric()])
                     elif ' mph' in ss:
                         ns.append(int(ss.split(' mph')[0]) * 1.609344)
                 if len(ns) > 0:
@@ -1614,12 +1692,14 @@
                 else:
                     continue
             elif isinstance(s, str):
-                if not any(c.isalpha() for c in s) and not (';' in s) and not ('|' in s):
+                if not any(c.isalpha() for c in s) and not (';' in s) and not ('|' in s) and not (',' in s):
                     ss = int(s)
                 elif not any(c.isalpha() for c in s) and ';' in s:
                     ss = mean([int(x) for x in s.split(';') if x.isnumeric()])
                 elif not any(c.isalpha() for c in s) and '|' in s:
                     ss = mean([int(x) for x in s.split('|') if x.isnumeric()])
+                elif not any(c.isalpha() for c in s) and ',' in s:
+                    ss = mean([int(float(x)) for x in s.split(',')])
                 elif ' mph' in s:
                     ss = int(s.split(' mph')[0]) * 1.609344
                 else:
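This maxspeed normalisation now exists in four near-identical copies (list and string branches in both calc_avg_speed and assign_avg_speed), each handling ';', '|' and ',' separators plus ' mph'. A sketch of the logic pulled into one hypothetical helper; the name and placement are suggestions, not part of the patch:

    from statistics import mean

    def parse_maxspeed(value):
        """Normalise an OSM maxspeed tag to km/h, or None if unparseable."""
        if isinstance(value, list):
            parsed = [parse_maxspeed(v) for v in value]
            parsed = [p for p in parsed if p is not None]
            return mean(parsed) if parsed else None
        if ' mph' in value:
            return int(value.split(' mph')[0]) * 1.609344
        if any(c.isalpha() for c in value):
            return None                      # e.g. 'signals', 'none'
        for sep in (';', '|', ','):
            if sep in value:
                parts = [float(x) for x in value.split(sep) if x.replace('.', '', 1).isdigit()]
                return mean(parts) if parts else None
        return int(value)

    print(parse_maxspeed('50;70'))   # 60.0
    print(parse_maxspeed('30 mph'))  # 48.28032

A single helper would also avoid the easy `s`/`ss` variable slips that near-duplicate branches like these invite.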
@@ -1654,6 +1734,7 @@ def assign_avg_speed(graph, avg_road_speed, road_type_col_name, save_path=None,

     # make a list of strings instead of just a string of the road types column
     avg_road_speed["road_types"] = avg_road_speed["road_types"].astype(str)
+
     # calculate the average maximum speed per edge and assign the ones that don't have a value
     for u, v, k, edata in graph.edges.data(keys=True):
         road_type = str(edata[road_type_col_name])
@@ -1662,12 +1743,14 @@
             if isinstance(max_speed, list):
                 ns = []
                 for ms in max_speed:
-                    if not any(c.isalpha() for c in ms) and not (';' in ms) and not ('|' in ms):
+                    if not any(c.isalpha() for c in ms) and not (';' in ms) and not ('|' in ms) and not (',' in ms):
                         ns.append(int(ms))
                     elif not any(c.isalpha() for c in ms) and ';' in ms:
                         ns.extend([int(x) for x in ms.split(';') if x.isnumeric()])
                     elif not any(c.isalpha() for c in ms) and '|' in ms:
                         ns.extend([int(x) for x in ms.split('|') if x.isnumeric()])
+                    elif not any(c.isalpha() for c in ms) and ',' in ms:
+                        ns.extend([int(x) for x in ms.split(',') if x.isnumeric()])
                     elif ' mph' in ms:
                         ns.append(int(ms.split(' mph')[0]) * 1.609344)
                 if len(ns) > 0:
@@ -1676,12 +1759,14 @@
                     graph[u][v][k]['avgspeed'] = \
                         avg_road_speed.loc[avg_road_speed['road_types'] == road_type, 'avg_speed'].iloc[0]
             elif isinstance(max_speed, str):
-                if not any(c.isalpha() for c in max_speed) and not (';' in max_speed) and not ('|' in max_speed):
+                if not any(c.isalpha() for c in max_speed) and not (';' in max_speed) and not ('|' in max_speed) and not (',' in max_speed):
                     graph[u][v][k]['avgspeed'] = int(max_speed)
                 elif not any(c.isalpha() for c in max_speed) and ';' in max_speed:
                     graph[u][v][k]['avgspeed'] = mean([int(x) for x in max_speed.split(';') if x.isnumeric()])
                 elif not any(c.isalpha() for c in max_speed) and '|' in max_speed:
                     graph[u][v][k]['avgspeed'] = mean([int(x) for x in max_speed.split('|') if x.isnumeric()])
+                elif not any(c.isalpha() for c in max_speed) and ',' in max_speed:
+                    graph[u][v][k]['avgspeed'] = mean([int(float(x)) for x in max_speed.split(',')])
                 elif ' mph' in max_speed:
                     graph[u][v][k]['avgspeed'] = int(max_speed.split(' mph')[0]) * 1.609344
                 else:
@@ -2132,6 +2217,7 @@ def criticality_multi_link_hazard_OD(graph, prefRoutes, weighingName, hazardName
                                    geometry='geometry', crs={'init': 'epsg:{}'.format(crs_)})

     to_remove = [(e[0], e[1], e[2]) for e in graph.edges.data(keys=True) if (e[-1][hazardName] > threshold) & ('bridge' not in e[-1])]
+    # variant that also removes flooded bridges:
+    # to_remove = [(e[0], e[1], e[2]) for e in graph.edges.data(keys=True) if (e[-1][hazardName] > threshold)]
     graph.remove_edges_from(to_remove)

     for ii in range(len(prefRoutes.index)):
@@ -2154,6 +2240,7 @@
             print(extra_time)
             if prefRoutes.iloc[ii][weighingName] != alt_route:
                 # the alternative route is different from the optimal route
+                print('yes')
                 disrupted = 1
                 detour = "alt_route"
                 # found out which edges belong to the preferred path
diff --git a/ra2ce/create_network_from_osm_dump.py b/ra2ce/create_network_from_osm_dump.py
index fac8f4365..e6b54fa01 100644
--- a/ra2ce/create_network_from_osm_dump.py
+++ b/ra2ce/create_network_from_osm_dump.py
@@ -126,7 +126,7 @@ def fetch_roads(region, osm_pbf_path, **kwargs):

 def convert_osm(osm_convert_path, pbf, o5m):
     """
-    Convers an osm PBF file to o5m
+    Converts an osm PBF file to o5m
     """

     command = '""{}" "{}" --complete-ways --drop-broken-refs -o="{}""'.format(osm_convert_path, pbf, o5m)
@@ -437,30 +437,30 @@ def graphs_from_o5m(o5m_path,save_shapes=None,bidirectional=False, simplify=True
         *G_simple* (Graph object) : simplified graph
         *ID_tables* (tuple with dicts) : (simple_to_complex, complex_to_simple)
     """
-    from osmnx.utils import overpass_json_from_file
-    from osmnx.core import create_graph
-    from osmnx.simplify import simplify_graph
+    # from osmnx.utils import overpass_json_from_file
+    from osmnx.geometries import geometries_from_xml
+    from osmnx.graph import _create_graph
+    from osmnx.graph import graph_from_xml
+    from osmnx.simplification import simplify_graph

     # transmogrify file of OSM XML data into JSON
    filename = o5m_path
-    response_jsons = [overpass_json_from_file(filename)]
+    # TODO: check outcomes
+    # From the old osmnx API; this was split into two steps because of the unique IDs.
+    # response_jsons = [overpass_json_from_file(filename)]
+    # G_complex = _create_graph(response_jsons, bidirectional=bidirectional,
+    #                           retain_all=retain_all)

-    # create graph using this response JSON
-    G_complex = create_graph(response_jsons, bidirectional=bidirectional,
-                             retain_all=retain_all, name='unnamed')
+    G_complex = graph_from_xml(filename, bidirectional=bidirectional, simplify=False,
+                               retain_all=retain_all)

     G_complex = graph_create_unique_ids(G_complex, 'G_fid_complex')

     print('graphs_from_o5m() returning graph with {:,} nodes and {:,} edges'.format(len(list(G_complex.nodes())),
                                                                                     len(list(G_complex.edges()))))
-    if save_shapes is not None:
-        graph_to_shp(G_complex, Path(save_shapes).joinpath('{}_edges.shp'.format('G_complex')),
-                     Path(save_shapes).joinpath('{}_nodes.shp'.format('G_complex')))

+    # simplify the graph topology as the last step.
-    #TODO: (Kees' opinion): we should not save the shapes WITHIN this function, to keep the function
-    #TODO: manageable; better to just return the objects, and have a separate function that saves the shapes,
-    # for the case you want to do that
-    #If it would save something, it would save the graphs
+
     if simplify:
         try:
             G_simple = simplify_graph(G_complex)
@@ -476,11 +476,10 @@ def graphs_from_o5m(o5m_path,save_shapes=None,bidirectional=False, simplify=True
             # ... and add this info
             G_complex = add_simple_ID_to_G_complex(G_complex, complex_to_simple)
             print('Simple IDs were added to the complex Graph')
-            if save_shapes is not None:
-                graph_to_shp(G_simple, Path(save_shapes).joinpath('{}_edges.shp'.format('G_simple')),
-                             Path(save_shapes).joinpath('{}_nodes.shp'.format('G_simple')))
+
         except:
             G_simple = None
+            ID_tables = None
             print('Did not create a simplified version of the graph')

     return G_complex,G_simple,ID_tables
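This swaps the osmnx 0.x pipeline (overpass_json_from_file followed by create_graph) for the public osmnx 1.x entry point graph_from_xml, which parses an OSM XML file directly. A minimal sketch of the new call, assuming osmnx >= 1.0:

    import osmnx as ox

    # Build an unsimplified MultiDiGraph straight from an OSM XML file.
    # simplify=False keeps every OSM node, so unique complex IDs can be
    # assigned before topology simplification runs as a separate step.
    G_complex = ox.graph_from_xml('network.osm', bidirectional=False,
                                  simplify=False, retain_all=False)

Note that geometries_from_xml and _create_graph are imported above but only referenced by the commented-out code path; _create_graph is also a private osmnx function whose signature may change between releases.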
@@ -543,7 +542,7 @@ def graph_to_shp(G, edge_shp, node_shp):
     nodes.to_file(node_shp, driver='ESRI Shapefile', encoding='utf-8')
     edges.to_file(edge_shp, driver='ESRI Shapefile', encoding='utf-8')

-def from_dump_tool_workflow(path_to_pbf,road_types,save_files=None, segmentation=None,save_shapes=None):
+def from_dump_tool_workflow(InputDict, road_types, save_files=None, segmentation=None, save_shapes=None, simplify=True):
     """
     Example workflow for use in the tool version of RA2CE
@@ -564,16 +563,15 @@
     osm_convert_exe = ra2ce_main_path / 'osmconvert64.exe'
     osm_filter_exe = ra2ce_main_path / 'osmfilter.exe'
     assert osm_convert_exe.exists() and osm_filter_exe.exists()
-
+    o5m_path = Path(str(os.path.splitext(os.path.splitext(InputDict["name_of_pbf"])[0])[0]) + '.o5m')
     # Prepare the making of a new o5m in the same folder as the pbf
-    o5m_path= Path(str(os.path.splitext(os.path.splitext(path_to_pbf)[0])[0])+'.o5m')
-    o5m_filtered_path= Path(str(os.path.splitext(os.path.splitext(path_to_pbf)[0])[0])+'_filtered.o5m')
+    o5m_filtered_path = Path(str(os.path.splitext(os.path.splitext(InputDict["name_of_pbf"])[0])[0]) + '_filtered.o5m')

     # CONVERT FROM PBF TO O5M
     if not o5m_path.exists():
         assert not o5m_filtered_path.exists()
         print('Start conversion from pbf to o5m')
-        convert_osm(osm_convert_exe, path_to_pbf, o5m_path)
+        convert_osm(osm_convert_exe, InputDict["name_of_pbf"], o5m_path)
         print('Converted osm.pbf to o5m, created: {}'.format(o5m_path))
     else:
         print('O5m path already exists, using the existing one!: {}'.format(o5m_path))
@@ -587,7 +585,7 @@
     assert o5m_path.exists() and o5m_filtered_path.exists()

-    G_complex, G_simple, ID_tables = graphs_from_o5m(o5m_filtered_path, save_shapes=save_shapes, bidirectional=False, simplify=True,
+    G_complex, G_simple, ID_tables = graphs_from_o5m(o5m_filtered_path, save_shapes=save_shapes, bidirectional=False, simplify=simplify,
                                                      retain_all=False)

@@ -595,27 +593,33 @@
     #CONVERT GRAPHS TO GEODATAFRAMES
     print('Start converting the graphs to geodataframes')
     edges_complex, node_complex = graph_to_gdf(G_complex)
-    # edges_simple, nodes_simple = graph_to_gdf(G_simple)
-    print('Finished converting the graphs to geodataframes')
+    print('Finished converting G_complex to geodataframes')

     # first save the G_complex, G_simple and edges_complex (when not cut yet!).
     # When segmentation is yes, the edges_complex will be overwritten with the cut version and given as output
     # TODO: check what needs to be the output for damage: edges_complex or edges_complex_cut
     if save_files:
-        output_path = Path(__file__).parents[1] / 'test/output/'
-
-        path = output_path / 'G_simple.gpickle'
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_simple.gpickle'))
         nx.write_gpickle(G_simple, path, protocol=4)
         print(path, 'saved')
-        path = output_path / 'G_complex.gpickle'
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_complex.gpickle'))
         nx.write_gpickle(G_complex, path, protocol=4)
         print(path, 'saved')
-        with open((output_path / 'edges_complex.p'), 'wb') as handle:
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_edges_complex.p'))
+        with open(str(path), 'wb') as handle:
             pickle.dump(edges_complex, handle)
-        print(output_path, 'edges_complex.p saved')
+        print(path, 'saved')
+
+    if save_shapes is not None:
+        graph_to_shp(G_complex, Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_complex_edges.shp')),
+                     Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_complex_nodes.shp')))
+        if simplify:
+            graph_to_shp(G_simple, Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_simple_edges.shp')),
+                         Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_simple_nodes.shp')))

     # cut the edges in the complex geodataframe to segments of equal lengths or smaller
     # output will be the cut edges_complex
+    # TODO: change the save_shapes file names so they start with the analysis name, like save_files (above)
     if segmentation is not None:
         edges_complex = cut_gdf(edges_complex, segmentation)
         print('Finished segmenting the geodataframe with split length: {} degree'.format(segmentation))
diff --git a/ra2ce/create_network_from_polygon.py b/ra2ce/create_network_from_polygon.py
index b01f8285e..a8a1d9db7 100644
--- a/ra2ce/create_network_from_polygon.py
+++ b/ra2ce/create_network_from_polygon.py
@@ -16,13 +16,13 @@
 import fiona
 from shapely.geometry import MultiPolygon
 from shapely.geometry import Polygon
-
+import pickle
+from shapely.wkt import loads
 from osmnx.truncate import truncate_graph_polygon
 from osmnx.simplification import simplify_graph
 from osmnx.projection import project_geometry
 from osmnx.utils_graph import count_streets_per_node
 import osmnx.settings as settings
-
 # local modules
 from create_network_from_osm_dump import graph_create_unique_ids
 from create_network_from_osm_dump import graph_to_gdf
@@ -178,7 +178,7 @@ def graph_from_polygon(polygon, network_type='all_private',

     return G
-def get_graph_from_polygon(InputDict, undirected=True, simplify=True, save_shapes=''):
+def get_graph_from_polygon(InputDict, undirected=True, simplify=True, save_shapes='', save_files=''):
     """
     Get an OSMnx graph from a polygon shapefile.
@@ -210,6 +210,9 @@
         for r in source:
             if 'geometry' in r:  # added this line to not take into account "None" geometry
                 polygon = shape(r['geometry'])
+                # wkt = polygon.to_wkt()
+                # geom = loads(wkt)
+                # polygon, _ = osmnx.projection.project_geometry(polygon, crs={'init': 'epsg:4326'}, to_latlong=True)

     if 'road_types' in InputDict:
         RoadTypes = InputDict['road_types']
@@ -217,6 +220,7 @@
             print(cf)  # assuming the empty cell in the excel is a numpy.float64 nan value

             # osmnx 0.16/1.01
+
             G_complex = osmnx.graph_from_polygon(polygon=polygon, custom_filter=cf, simplify=False, retain_all=True)
             # osmnx OLD
             # G_complex = graph_from_polygon(polygon=polygon, network_type=NetworkType, infrastructure='way["highway"~"motorway|trunk|primary"]', simplify=False)
@@ -245,13 +249,25 @@
         graph_to_shp(G_complex, Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_complex_edges.shp')),
                      Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_complex_nodes.shp')))
+
+        if simplify:
             graph_to_shp(G_simple, Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_simple_edges.shp')),
                          Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_simple_nodes.shp')))

+    if save_files:
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_simple.gpickle'))
+        nx.write_gpickle(G_simple, path, protocol=4)
+        print(path, 'saved')
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_G_complex.gpickle'))
+        nx.write_gpickle(G_complex, path, protocol=4)
+        print(path, 'saved')
+        edges_complex, node_complex = graph_to_gdf(G_complex)
+        path = Path(InputDict['output'] / (str(InputDict['analysis_name']) + '_edges_complex.p'))
+        with open(str(path), 'wb') as handle:
+            pickle.dump(edges_complex, handle)
+        print(path, 'saved')

     return G_complex, G_simple

-def from_polygon_tool_workflow(InputDict):
+def from_polygon_tool_workflow(InputDict, save_shapes='', save_files=''):
     """
     Example workflow for use in the tool version of RA2CE
@@ -269,7 +285,7 @@
         G_complex_edges (GeoDataFrame) : Complex graph (for use in the direct analyses)
     """
     ra2ce_main_path = Path(__file__).parents[1]
-    G_complex, G_simple = get_graph_from_polygon(InputDict, save_shapes=True)
+    G_complex, G_simple = get_graph_from_polygon(InputDict, save_shapes=save_shapes, save_files=save_files)

     #CONVERT GRAPHS TO GEODATAFRAMES
     print('Start converting the graphs to geodataframes')
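Output artifacts are now consistently named <output>/<analysis_name>_<suffix> across all three modules. One observation on the recurring expression Path(InputDict['output'] / (str(InputDict['analysis_name']) + suffix)): since InputDict['output'] must already be a pathlib.Path for the / operator to work, the result is already a Path and the outer Path(...) wrapper is redundant. A hypothetical helper, not part of the patch, would shrink the many call sites:

    from pathlib import Path

    def output_path(input_dict: dict, suffix: str) -> Path:
        """Build <output>/<analysis_name><suffix> for a RA2CE artifact."""
        return input_dict['output'] / (str(input_dict['analysis_name']) + suffix)

    # e.g. output_path(InputDict, '_G_simple.gpickle')

Centralising the naming would also keep readers and writers of the same cache file (for example the OD-pairs shapefile checked in analyses_indirect.py) agreeing on one spelling.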
diff --git a/ra2ce/ra2ce.py b/ra2ce/ra2ce.py
index c62f0e5d6..1ab598b49 100644
--- a/ra2ce/ra2ce.py
+++ b/ra2ce/ra2ce.py
@@ -76,8 +76,8 @@ def create_network(inputDict):
     output_path = inputDict['output']

     # when G_simple and edges_complex already exist no need to create new graph and gdf
-    G_simple_path = output_path / 'G_simple.gpickle'
-    edges_complex_path = output_path / 'edges_complex.p'
+    G_simple_path = Path(inputDict['output'] / (str(inputDict['analysis_name']) + '_G_simple.gpickle'))
+    edges_complex_path = Path(inputDict['output'] / (str(inputDict['analysis_name']) + '_edges_complex.p'))

     #check whether the paths exist
     if not (G_simple_path.exists() and edges_complex_path.exists()):
@@ -88,14 +88,15 @@
         elif inputDict['network_source'] == 'Network based on OSM dump':
             print('start creating network from osm_dump')
             roadTypes = inputDict['road_types'].lower().replace(' ', ' ').split(',')
-            G, edge_gdf = from_dump_tool_workflow(inputDict["name_of_pbf"], roadTypes, save_files=True, segmentation=None)
+            # TODO: build in that save_shapes is automatically None based on the input table
+            G, edge_gdf = from_dump_tool_workflow(inputDict, roadTypes, save_files=True, segmentation=None, save_shapes=inputDict["output"], simplify=True)  # in case of save_shapes, pass the output path here
         elif inputDict['network_source'] == 'Network based on OSM online':
             print('start creating network from osm_online')
             inputDict['network_type'] = inputDict['network_type'].lower().replace(' ', '')  # decapitalize and remove all whitespaces
             if 'road_types' in inputDict:
                 inputDict['road_types'] = inputDict['road_types'].lower().replace(', ', '|')
-            G, edge_gdf = from_polygon_tool_workflow(inputDict)
+            G, edge_gdf = from_polygon_tool_workflow(inputDict, save_shapes=True, save_files=True)
         else:
             Exception("Check your user_input.xlsx, the input under 'network_source' is not one of the given options.")
     else: