Skip to content

Commit

Permalink
added in the option to load external data to represent street network…
Browse files Browse the repository at this point in the history
… intersections, as per #302, and related to #158
  • Loading branch information
carlhiggs committed Jun 16, 2023
1 parent a0136c7 commit 0da4569
Show file tree
Hide file tree
Showing 4 changed files with 100 additions and 68 deletions.
4 changes: 4 additions & 0 deletions process/configuration/regions/example_ES_Las_Palmas_2023.yml
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,10 @@ network:
polygon_iteration: false
connection_threshold:
intersection_tolerance: 12
# intersections:
# data: network_data/your_intersection_data.geojson
# citation: 'Provider of your intersection data. YYYY. Name of your intersection data. https://source-url-for-your-data.place'
# note: 'Uncomment this configuration section to optionally specify an external dataset of intersections. Otherwise, these are derived using OpenStreetMap and OSMnx using the intersection_tolerance parameter. If providing intersection data, you can modify this note for it to be included in the metadata, or remove it.'
urban_region:
name: "Global Human Settlements urban centres: 2015 (EU JRC, 2019; Las Palmas de Gran Canaria only)"
data_dir: "urban_regions/Example/Las Palmas de Gran Canaria - GHS_STAT_UCDB2015MT_GLOBE_R2019A_V1_2.gpkg"
Expand Down
50 changes: 10 additions & 40 deletions process/subprocesses/_01_create_study_region.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,13 @@
"""Set up study region boundaries."""

import os
import subprocess as sp
import sys
import time

import geopandas as gpd

# Set up project and region parameters for GHSCIC analyses
import ghsci
import numpy as np
import pandas as pd
from geoalchemy2 import Geometry, WKTElement
from script_running_log import script_running_log
from shapely.geometry import MultiPolygon, Polygon
from sqlalchemy import inspect, text
from sqlalchemy import text


def create_study_region(codename):
Expand All @@ -37,13 +30,6 @@ def create_study_region(codename):
os.makedirs(f'{ghsci.folder_path}/process/data/_study_region_outputs')
if not os.path.exists(r.config['region_dir']):
os.makedirs(r.config['region_dir'])
# if {
# 'study_region_boundary',
# 'urban_region',
# 'urban_study_region',
# r.config['buffered_urban_study_region'],
# }.issubset(r.tables):
# sys.exit(f"""Study region boundaries have previously been created ({codename}, urban_region, urban_study_region and {r.config['buffered_urban_study_region']}). If you wish to recreate these, please manually drop them (e.g. using psql) or optionally drop the {r.config['db']} database and start again (e.g. using the subprocesses/_drop_study_region_database.py utility script.\n""")
print('Create study region boundary... ')
# import study region policy-relevant administrative boundary, or GHS boundary
try:
Expand Down Expand Up @@ -72,22 +58,12 @@ def create_study_region(codename):
else:
boundary_data = area_data
query = ''
command = (
' ogr2ogr -overwrite -progress -f "PostgreSQL" '
f' PG:"host={db_host} port={db_port} dbname={db}'
f' user={db_user} password={db_pwd}" '
f' "{boundary_data}" '
f' -lco geometry_name="geom" -lco precision=NO '
f' -t_srs {crs_srid} -nln "study_region_boundary" '
f' -nlt PROMOTE_TO_MULTI'
f' {query}'
r.ogr_to_db(
source=boundary_data,
layer='study_region_boundary',
query=query,
promote_to_multi=True,
)
print(command)
failure = sp.call(command, shell=True)
if failure == 1:
sys.exit(
f"Error reading in boundary data '{area_data}' (check format)",
)
except Exception as e:
raise Exception(f'Error reading in boundary data (check format): {e}')
print('\nCreate urban region boundary... ', end='', flush=True)
Expand Down Expand Up @@ -143,17 +119,11 @@ def create_study_region(codename):
,"study_region_boundary" b
WHERE ST_Intersects(ST_Union(a.geom),ST_Union(b.geom))
"""
command = (
' ogr2ogr -overwrite -progress -f "PostgreSQL" '
f' PG:"host={db_host} port={db_port} dbname={db}'
f' user={db_user} password={db_pwd}" '
f""" "{r.config['urban_region']['data_dir']}" """
f' -lco geometry_name="geom" -lco precision=NO '
f' -t_srs {crs_srid} -nln full_urban_region '
f' {query} '
r.ogr_to_db(
source=r.config['urban_region']['data_dir'],
layer='full_urban_region',
query=query,
)
print(command)
sp.call(command, shell=True)
sql = f"""
CREATE TABLE IF NOT EXISTS urban_region AS
SELECT '{name}'::text AS "study_region",
Expand Down
61 changes: 36 additions & 25 deletions process/subprocesses/_03_create_network_resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
Create pedestrian street networks for specified city.
"""

import os
import sys
import time
from datetime import datetime
Expand Down Expand Up @@ -189,29 +190,41 @@ def gdf_to_postgis_format(gdf, engine, table, geometry_name='geom'):
)


def clean_intersections(G_proj, r):
"""Generate cleaned intersections using OSMnx and store in postgis database, or otherwise retrieve them."""
def load_intersections(r, G_proj):
"""Prepare intersections using a configured data source, or OSMnx to derive these, and store in postgis database."""
if r.config['intersections_table'] not in r.tables:
## Copy clean intersections to postgis
print('\nPrepare and copy clean intersections to postgis... ')
# Clean intersections
intersections = ox.consolidate_intersections(
G_proj,
tolerance=r.config['network']['intersection_tolerance'],
rebuild_graph=False,
dead_ends=False,
)
intersections = gpd.GeoDataFrame(
intersections, columns=['geom'],
).set_geometry('geom')
with r.engine.connect() as connection:
intersections.to_postgis(
r.config['intersections_table'], connection, index=True,
if (
r.config['intersections_table']
== f"intersections_osmnx_{r.config['network']['intersection_tolerance']}m"
):
print(
f"\nRepresent intersections using OpenStreetMap derived data using OSMnx consolidate intersections function with tolerance of {r.config['network']['intersection_tolerance']} metres... ",
)
intersections = ox.consolidate_intersections(
G_proj,
tolerance=r.config['network']['intersection_tolerance'],
rebuild_graph=False,
dead_ends=False,
)
intersections = gpd.GeoDataFrame(
intersections, columns=['geom'],
).set_geometry('geom')
with r.engine.connect() as connection:
intersections.to_postgis(
r.config['intersections_table'], connection, index=True,
)
else:
print(
f"\nRepresent intersections using configured data {r.config['network']['intersections']['data']}... ",
)
r.ogr_to_db(
source=f"/home/ghsci/process/data/{r.config['network']['intersections']['data']}",
layer=r.config['intersections_table'],
)
print(' - Done.')
else:
print(
'It appears that clean intersection data has already been prepared and imported for this region.',
'It appears that intersection data has already been prepared and imported for this region.',
)


Expand Down Expand Up @@ -274,13 +287,11 @@ def create_network_resources(codename):
G_proj = generate_pedestrian_network_nodes_edges(
r, ghsci.settings['network_analysis']['pedestrian'],
)
clean_intersections(G_proj, r)

create_pgrouting_network_topology(r)

# ensure user is granted access to the newly created tables
with r.engine.begin() as connection:
connection.execute(text(ghsci.grant_query))
create_pgrouting_network_topology(r)
load_intersections(r, G_proj)
# ensure user is granted access to the newly created tables
with r.engine.begin() as connection:
connection.execute(text(ghsci.grant_query))

# output to completion log
script_running_log(r.config, script, task, start)
Expand Down
53 changes: 50 additions & 3 deletions process/subprocesses/ghsci.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,6 +259,44 @@ def get_geojson(
finally:
return geojson

def ogr_to_db(
    self,
    source: str,
    layer: str,
    query: str = '',
    promote_to_multi: bool = False,
    source_crs: str = None,
):
    """Read spatial data with ogr2ogr and save to Postgis database.

    Parameters
    ----------
    source: str
        Path to the spatial data file to be loaded.
    layer: str
        Name of the table to create in the Postgis database (-nln).
    query: str
        Optional additional ogr2ogr arguments, e.g. an SQL/where clause
        restricting the features loaded (appended verbatim to the command).
    promote_to_multi: bool
        If True, promote geometries to their multi-part equivalents
        (-nlt PROMOTE_TO_MULTI).
    source_crs: str
        Optional source coordinate reference system, e.g. 'EPSG:4326' in
        the case of a WGS84 source, for files lacking CRS metadata.

    Returns
    -------
    subprocess.CompletedProcess
        The completed ogr2ogr process, on success.

    Raises
    ------
    SystemExit
        If ogr2ogr exits with a non-zero return code.
    """
    import subprocess as sp

    crs_srid = self.config['crs_srid']
    db = self.config['db']
    db_host = self.config['db_host']
    db_port = self.config['db_port']
    db_user = self.config['db_user']
    db_pwd = self.config['db_pwd']
    if source_crs is not None:
        # Some spatial data files may require clarification of the source coordinate reference system
        # If this is required, source_crs can be defined, e.g. 'EPSG:4326' in the case of a WGS84 source
        s_srs = f'-s_srs {source_crs}'
    else:
        s_srs = ''
    if promote_to_multi:
        multi = '-nlt PROMOTE_TO_MULTI'
    else:
        multi = ''
    command = f' ogr2ogr -overwrite -progress -f "PostgreSQL" PG:"host={db_host} port={db_port} dbname={db} user={db_user} password={db_pwd}" "{source}" -lco geometry_name="geom" -lco precision=NO -t_srs {crs_srid} {s_srs} -nln "{layer}" {multi} {query}'
    result = sp.run(command, shell=True)
    print(result)
    # sp.run() returns a CompletedProcess, not an int, so the exit status
    # must be read from .returncode (comparing the CompletedProcess itself
    # against an int is always False and would silently ignore failures).
    # ogr2ogr signals failure with any non-zero code, not only 1.
    if result.returncode != 0:
        sys.exit(
            f"Error reading in data for {layer} '{source}'; please check format and configuration.",
        )
    else:
        return result

def to_csv(self, table, file, drop=['geom'], index=False):
"""Write an SQL table or query to a csv file."""
df = self.get_df(table)
Expand Down Expand Up @@ -409,9 +447,18 @@ def region_dictionary_setup(self, codename, region_config, folder_path):
'osm_region'
] = f'{r["region_dir"]}/{codename}_{r["osm_prefix"]}.pbf'
r['codename_poly'] = f'{r["region_dir"]}/poly_{r["db"]}.poly'
r[
'intersections_table'
] = f"clean_intersections_{r['network']['intersection_tolerance']}m"
if (
'intersections' in r['network']
and r['network']['intersections'] is not None
):
intersections = os.path.splitext(
os.path.basename(r['network']['intersections']['data']),
)[0]
r['intersections_table'] = f'intersections_{intersections}'
else:
r[
'intersections_table'
] = f"intersections_osmnx_{r['network']['intersection_tolerance']}m"
r['gpkg'] = f'{r["region_dir"]}/{codename}_{study_buffer}m_buffer.gpkg'
r['point_summary'] = 'indicators_sample_points'
r['grid_summary'] = f'indicators_{resolution}'
Expand Down

0 comments on commit 0da4569

Please sign in to comment.