Merge dev to main 2024-12-16
alfhj authored Dec 16, 2024
2 parents 598fd5b + 2386324 commit 3861aaa
Showing 8 changed files with 178 additions and 49 deletions.
20 changes: 15 additions & 5 deletions README.md
@@ -1,9 +1,19 @@
# nvdb2xodr
Convert Vegvesen NVDB road network to OpenDrive
Convert Vegvesen NVDB road networks to OpenDRIVE files

## Usage:
1. Install requirements: `pip install -r requirements.txt`
2. Add a configuration for the area you want to convert in `run_tools.py` and make sure `run_download_data()` is called in `__main__`
3. Run the download script: `python run_tools.py`
4. Add a matching configuration in `nvdb_to_opendrive.py` that points to the same JSON file (see the sketch after this list)
5. Run the conversion: `python nvdb_to_opendrive.py`
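
A minimal sketch of steps 2-5, assuming the `Config`/`main` interface and the `download_data.run(boundary, output_file)` signature introduced in this commit; the paths and bounding box are examples only:

```python
# Sketch only: paths and the bounding box are illustrative.
from tools import download_data
from nvdb_to_opendrive import Config, main

boundary = "270000,7039700,271200,7041000"                   # minx,miny,maxx,maxy (UTM zone 33)
input_file = "../Notebooks/veglenkesekvens_gloshaugen.json"

download_data.run(boundary, input_file)                      # download NVDB data to JSON
main(Config(input_file, "../OpenDrive/gloshaugen_nvdb.xodr", boundary))  # convert to OpenDRIVE
```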

## TODO:
- [x] Junctions
- [ ] Fix roundabouts and missing roads
- [ ] Incorporate optional FKB data
- [ ] Handle changing number of lanes properly
- [ ] Infer turn restrictions
- [ ] Incorporate optional road polygon data
- [ ] Expand to OSM dataset
- [ ] Lane access for bus lanes
- [ ] Setting types for exit ramps
- [ ] Setting types for exit ramps
- [x] Fix roundabouts and missing roads
- [x] Junctions
84 changes: 56 additions & 28 deletions nvdb_to_opendrive.py
@@ -1,13 +1,19 @@
import re
from dataclasses import dataclass
from datetime import datetime

import lxml.etree as ET
from lxml.etree import Element, ElementTree
from datetime import datetime
from shapely import from_wkt, get_num_points, line_merge
from shapely.ops import linemerge, unary_union
from shapely.geometry import Point, LineString, MultiLineString
from src.utils import *
from src.constants import JUNCTION_MARGIN, SAVE_RELATIVE_COORDINATES, detail_levels, road_types
from src.road import JunctionConnection, JunctionRoad, LaneType, Road, RoadNetwork, RoadSegment
from src.constants import CENTER_COORDS, JUNCTION_MARGIN, road_types, detail_levels
from src.utils import *


@dataclass
class Config:
input_file: str
output_file: str
boundary: str


def SubElement(parent: Element, **kwargs):
@@ -98,14 +104,17 @@ def split_road_into_parts(roads: list):
def startBasicXODRFile() -> Element:
root = ET.Element("OpenDRIVE")
header = ET.SubElement(root, "header", revMajor="1", revMinor="6", name="Glosehaugen", version="0.02", date=datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
#ET.SubElement(header, "geoReference").text = ET.CDATA(f"+proj=tmerc +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +ellps=GRS80 +units=m +vunits=m")
ET.SubElement(header, "geoReference").text = ET.CDATA(f"+proj=tmerc +lat_0={CENTER_COORDS[0]} +lon_0={CENTER_COORDS[1]} +x_0=0 +y_0=0 +ellps=GRS80 +units=m +vunits=m")
if SAVE_RELATIVE_COORDINATES:
ET.SubElement(header, "geoReference").text = ET.CDATA(f"+proj=tmerc +lat_0={CENTER_COORDS[0]} +lon_0={CENTER_COORDS[1]} +x_0=0 +y_0=0 +ellps=GRS80 +units=m +vunits=m")
else:
ET.SubElement(header, "geoReference").text = ET.CDATA(f"+proj=utm +zone=33 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs")

return root


def generate_single_road(sequence: dict, road_object: Road) -> Element:
# start road
road_name = sequence.get("adresse", f"Road {road_object.id}")
road_name = f"{sequence.get('adresse', 'Road')} ({sequence['veglenkesekvensid']})"
road = ET.Element("road", name=road_name, id=road_object.id, rule="RHT", junction="-1")
link = ET.SubElement(road, "link")
roadType = ET.SubElement(road, "type", s="0", type="town")
@@ -117,7 +126,8 @@ def generate_single_road(sequence: dict, road_object: Road) -> Element:

total_length = 0
for p in road_object.reference_line[:-1]:
geometry = ET.Element("geometry", s=str(total_length), x=str(p.x), y=str(p.y), hdg=str(p.heading), length=str(p.length))
x, y = (p.x, p.y) if SAVE_RELATIVE_COORDINATES else get_utm_coordinates(p.x, p.y)
geometry = ET.Element("geometry", s=str(total_length), x=str(x), y=str(y), hdg=str(p.heading), length=str(p.length))
ET.SubElement(geometry, "line")
ET.SubElement(elevationProfile, "elevation", s=str(total_length), a=str(p.z), b=str(p.slope), c="0", d="0")

@@ -150,36 +160,43 @@


def generate_road_sequence(root: Element, sequence: dict, nodes: dict[int, list[int]], road_network: RoadNetwork, next_id: int):
chains = filter_road_sequence(sequence)
chains = filter_road_sequence(sequence) # TODO: use nodes as explicit link between chains instead of start position order
portals_to_nodes = {portal["id"]: portal["tilkobling"]["nodeid"] for portal in sequence["porter"]}

start_node_id = None
road_segments = []
id_suffix = 1
for i, chain in enumerate(chains):
if "feltoversikt" not in chain:
continue
lanes_list = chain.get("feltoversikt")
if lanes_list is None:
if chain["type"] == "KONNEKTERING":
lanes_list = ["1", "2"] # assume two lanes on KONNEKTERING - TODO: use same as closest neighbour
else:
continue

line_string = chain["geometri"]["wkt"]
if line_string.startswith("LINESTRING Z"):
points_string = re.search(r"LINESTRING Z\((.*)\)", chain["geometri"]["wkt"]).group(1)
points_list = [tuple(float(p) for p in ps.strip().split(" ")) for ps in points_string.split(",")]
else: # TODO: figure out what to do with missing height coordinates
points_string = re.search(r"LINESTRING \((.*)\)", chain["geometri"]["wkt"]).group(1)
points_list = [tuple([float(p) for p in ps.strip().split(" ")] + [0]) for ps in points_string.split(",")]

points_string = re.search(r"LINESTRING Z\((.*)\)", chain["geometri"]["wkt"]).group(1)
points_list = [tuple(float(p) for p in ps.strip().split(" ")) for ps in points_string.split(",")]
if len(points_list) < 2:
continue

lanes_list = chain["feltoversikt"]
road_segment = RoadSegment(points_list)
road_segment.add_nvdb_lanes(lanes_list, chain.get("vegbredde"))
road_segments.append(road_segment)

start_node_id = start_node_id if start_node_id is not None else portals_to_nodes[chain["startport"]]
end_node_id = portals_to_nodes[chain["sluttport"]]

#print(len(nodes[end_node_id]))
if len(nodes[end_node_id]) <= 2 and i != len(chains) - 1: # normal road connection and not at end
continue # merge current with next road segment

# make road
#road_id = f"{sequence['veglenkesekvensid']}_{id_suffix}"
road_id = str(next_id)
road_id = str(next_id) # f"{sequence['veglenkesekvensid']}_{id_suffix}"
road_object = Road(road_segments, road_id, shorten=JUNCTION_MARGIN)
road_network.add_road(road_object)
road_network.add_junction(str(start_node_id), JunctionConnection(road_object, start=True))
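
As a possible simplification of the regex-based WKT parsing in the hunk above, `shapely` (already imported at the top of this module) parses both 2D and 3D line strings; a hedged sketch, not what the module does today, that defaults a missing height to 0 in line with the TODO note:

```python
# Hedged alternative to the regex parsing above; a sketch, not the current implementation.
from shapely import from_wkt

def wkt_to_points(wkt: str) -> list[tuple[float, float, float]]:
    coords = from_wkt(wkt).coords  # handles both "LINESTRING Z (...)" and "LINESTRING (...)"
    return [(c[0], c[1], c[2] if len(c) == 3 else 0.0) for c in coords]
```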
@@ -209,7 +226,8 @@ def generate_junction_road(road_object: JunctionRoad, junction_id: str, in_road:
# create geometry
planView = ET.SubElement(road, "planView")
elevationProfile = ET.SubElement(road, "elevationProfile")
geometry = ET.SubElement(planView, "geometry", s="0", x=str(road_object.start_point.x), y=str(road_object.start_point.y), hdg=str(road_object.start_point.heading), length=str(road_object.length))
x, y = (road_object.start_point.x, road_object.start_point.y) if SAVE_RELATIVE_COORDINATES else get_utm_coordinates(road_object.start_point.x, road_object.start_point.y)
geometry = ET.SubElement(planView, "geometry", s="0", x=str(x), y=str(y), hdg=str(road_object.start_point.heading), length=str(road_object.length))
ET.SubElement(geometry, "paramPoly3", aU=str(aU), aV=str(aV), bU=str(bU), bV=str(bV), cU=str(cU), cV=str(cV), dU=str(dU), dV=str(dV), pRange="normalized")
ET.SubElement(elevationProfile, "elevation", s="0", a=str(road_object.start_point.z), b=str(road_object.slope), c="0", d="0")

@@ -312,13 +330,11 @@ def generate_junctions(root: Element, road_network: RoadNetwork, next_id: int):
root.extend(junction_elements)


if __name__ == "__main__":
input_file = "veglenkesekvens2a.json"
output_file = "../OpenDrive/gloshaugen_nvdb.xodr"
print(f"Converting NVDB file {input_file} to OpenDrive format")
def main(config: Config):
print(f"Converting NVDB file {config.input_file} to OpenDrive format")
start_time = datetime.now()

roads = load_json(get_file_path(input_file))
roads = load_json(config.input_file)

merge_linked_locations(roads)
nodes = get_nodes(roads)
Expand All @@ -336,6 +352,10 @@ def generate_junctions(root: Element, road_network: RoadNetwork, next_id: int):
# set min/max coordinates
header = root.find("header")
minx, miny, maxx, maxy = road_network.minmax_xy
if not SAVE_RELATIVE_COORDINATES:
minx, miny = get_utm_coordinates(minx, miny)
maxx, maxy = get_utm_coordinates(maxx, maxy)

header.set("west", str(minx))
header.set("south", str(miny))
header.set("east", str(maxx))
@@ -344,7 +364,15 @@
#ElementTree.tostring(xodr, xml_declaration=True)
ET.indent(root, space=" ")
#print(ET.tostring(xodr, doctype='<?xml version="1.0" encoding="UTF-8"?>', pretty_print=True).decode())
ElementTree(root).write(get_file_path(output_file), doctype='<?xml version="1.0" encoding="UTF-8"?>', encoding="utf-8")
ElementTree(root).write(config.output_file, doctype='<?xml version="1.0" encoding="UTF-8"?>', encoding="utf-8")

total_time = (datetime.now() - start_time).total_seconds()
print(f"Finished in {total_time:.2f} seconds")


if __name__ == "__main__":
gløshaugen = Config("../Notebooks/veglenkesekvens_gloshaugen.json", "../OpenDrive/gloshaugen_nvdb.xodr", "270000,7039700,271200,7041000")
sandmoen = Config("../Notebooks/veglenkesekvens_sandmoen.json", "../OpenDrive/sandmoen_nvdb.xodr", "267500,7030500,268500,7031500")
sandmoen1 = Config("../Notebooks/nvdb_multi_sandmoen/veglenkesekvens_sandmoen_5000.json", "../OpenDrive/gloshaugen_test.xodr", "270000,7039700,271200,7041000")

total_time = datetime.now() - start_time
print(f"Finished in {total_time.total_seconds():.2f} seconds")
main(sandmoen)
6 changes: 6 additions & 0 deletions requirements.txt
@@ -0,0 +1,6 @@
lxml
numpy
pyproj
requests
shapely
tqdm
87 changes: 86 additions & 1 deletion run_tools.py
@@ -1,3 +1,88 @@
import json
import os
import uuid
from tools import download_data
import nvdb_to_opendrive
from pathlib import Path
from time import time

download_data.run()

def run_download_data():
gløshaugen = ("270000,7039700,271200,7041000", "../Notebooks/veglenkesekvens_gloshaugen.json")
sandmoen = ("267500,7030500,268500,7031500", "../Notebooks/veglenkesekvens_sandmoen.json")

download_data.run(*sandmoen)


def benchmark_function(function, measure_memory=True):
if measure_memory:
import tracemalloc
tracemalloc.start()

start_time = time()
result = function()
total_time = time() - start_time

response = {"execution_time": total_time}
if result is not None:
response["result"] = result

if measure_memory:
memory_usage = tracemalloc.get_traced_memory()
response["memory_usage"] = memory_usage
tracemalloc.stop()

return response


def run_download_data_multi():
center = (268000, 7031000)
width_min = 500
width_max = 5000
width_delta = 100
def output_file(width): return f"../Notebooks/nvdb_multi_sandmoen/veglenkesekvens_sandmoen_{width}.json"
output_path = "../Notebooks/nvdb_download_times_sandmoen.json"

Path(output_file("")).parent.mkdir(exist_ok=True)

benchmark_results = []
for width in range(width_min, width_max + 1, width_delta):
print(f"Downloading area of size {width}*{width} m^2")
boundary = f"{center[0]-width//2},{center[1]-width//2},{center[0]+width//2},{center[1]+width//2}"
result = benchmark_function(lambda: download_data.run(boundary, output_file(width)), measure_memory=False)
result["width"] = width
benchmark_results.append(result)

with open(output_path, "w") as f:
json.dump(benchmark_results, f, indent=4)


def run_nvdb_to_opendrive_multi(measure_memory=True):
input_path = "../Notebooks/nvdb_multi_sandmoen"
output_path = "../Notebooks/nvdb_execution_times_sandmoen.json"

files = {}
for file in Path(input_path).glob("*.json"):
width = int(file.stem.split("_")[-1])
files[width] = file

benchmark_results = []
for width, file in sorted(files.items()):
print(f"Running nvdb_to_opendrive.py on area of size {width}*{width} m^2")
output_file = f"temp-{uuid.uuid4()}.xodr"
config = nvdb_to_opendrive.Config(str(file), output_file, "")

result = benchmark_function(lambda: nvdb_to_opendrive.main(config))
result["width"] = width
benchmark_results.append(result)

os.remove(output_file)

with open(output_path, "w") as f:
json.dump(benchmark_results, f, indent=4)


if __name__ == "__main__":
run_download_data()
#run_download_data_multi()
#run_nvdb_to_opendrive_multi()
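
For reference, `benchmark_function` above wraps any zero-argument callable and reports wall-clock time plus, optionally, `tracemalloc` numbers; a small usage sketch (the benchmarked callable is an arbitrary example):

```python
# Usage sketch for benchmark_function; the callable here is an arbitrary example.
stats = benchmark_function(lambda: sum(range(1_000_000)), measure_memory=True)
print(stats["execution_time"])  # wall-clock seconds
print(stats["memory_usage"])    # (current, peak) bytes from tracemalloc
print(stats["result"])          # return value of the callable, present because it is not None
```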
2 changes: 1 addition & 1 deletion src/constants.py
@@ -1,8 +1,8 @@
DATA_PATH = "../Notebooks" # where to load data from
CENTER_COORDS = (63.41771242884644, 10.40335350836009) # x and y coordinates are specified in meters from this point
JUNCTION_MARGIN = 10 # junctions are generated by shrinking roads JUNCTION_MARGIN meters away from the connection point of the junction, then connecting every lane of the roads going into the junction
BIKING_WIDTH = 1.5 # default width for bike lanes
DRIVING_WIDTH = 3.5 # default width for car lanes
SAVE_RELATIVE_COORDINATES = False # save relative coordinates to the resulting file, where (0, 0) is the center of the data. This is necessary for software like esmini, which creates artefacts due to floating-point errors otherwise


#detail_levels = set(chain.get("detaljnivå") for seq in nvdb for chain in seq["veglenker"])
8 changes: 6 additions & 2 deletions src/road.py
@@ -130,6 +130,10 @@ def __init__(self, road_segments: list[RoadSegment], road_id: str, shorten: floa

x1, y1 = get_relative_coordinates(x1, y1)
x2, y2 = get_relative_coordinates(x2, y2)

if x1 == x2 and y1 == y2:
continue # TODO: handle dataset errors, e.g. at 269688.312 7039242.958

heading = get_heading(x1, y1, x2, y2)
length = get_length(x1, y1, x2, y2)
slope = (z2 - z1) / length
@@ -167,8 +171,8 @@ def __init__(self, start_point: ReferenceLinePoint, end_point: ReferenceLinePoin
self.start_point = start_point
self.params = road_params
self.length = road_length
self.slope = (end_point.z - start_point.z) / road_length
self.width_b = (out_width - in_width) / road_length
self.slope = (end_point.z - start_point.z) / road_length if road_length != 0 else 0 # TODO: fix 0 length roads
self.width_b = (out_width - in_width) / road_length if road_length != 0 else 0


@ dataclass
13 changes: 6 additions & 7 deletions src/utils.py
@@ -1,12 +1,11 @@
from math import atan2, cos, pi, sin, sqrt, tau
from math import atan2, cos, pi, sin, tau
from pathlib import Path
from shapely.geometry import Point, LineString
import numpy as np
import json

import pyproj

from .constants import CENTER_COORDS, DATA_PATH
from .constants import CENTER_COORDS

#transform = pyproj.Transformer.from_crs("EPSG:4326", "EPSG:5973")
transform = pyproj.Transformer.from_crs("EPSG:4326", "EPSG:25833")
@@ -23,10 +22,6 @@ def load_json(path):
return json.load(f)


def get_file_path(filename):
return Path(DATA_PATH).joinpath(filename)


def rotate(angle, phi):
return (angle + phi) % tau

@@ -53,6 +48,10 @@ def get_relative_coordinates(x, y):
return (x - center[0], y - center[1])


def get_utm_coordinates(x, y):
return (x + center[0], y + center[1])
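
The new `get_utm_coordinates` is the inverse of `get_relative_coordinates`; both are plain offsets around the projected `CENTER_COORDS`. A small sketch with an invented sample point:

```python
# Sketch: the two helpers are inverse offsets around the projected CENTER_COORDS.
x_utm, y_utm = 270500.0, 7040000.0                     # invented point in UTM zone 33 (EPSG:25833)
x_rel, y_rel = get_relative_coordinates(x_utm, y_utm)  # offsets from the center
x_back, y_back = get_utm_coordinates(x_rel, y_rel)
assert abs(x_back - x_utm) < 1e-6 and abs(y_back - y_utm) < 1e-6
```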


def get_uv_coordinates(x1, y1, h1, x2, y2, h2):
"""Get uv coordinates of point (x2, y2) relative to (x1, y1) and its heading h1
The U-axis will point in the same direction as h1, and the V-axis will point perpendicular to it
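
The body of `get_uv_coordinates` is collapsed in this view; a standard formulation consistent with the docstring (an assumption, not necessarily the exact implementation) rotates the offset into the frame of heading `h1`:

```python
# Assumed formulation only; the real function body is collapsed in this diff view.
from math import cos, sin, tau

def uv_sketch(x1, y1, h1, x2, y2, h2):
    dx, dy = x2 - x1, y2 - y1
    u = dx * cos(h1) + dy * sin(h1)    # component along the heading h1
    v = -dx * sin(h1) + dy * cos(h1)   # component 90 degrees counter-clockwise from h1
    return u, v, (h2 - h1) % tau       # plus the relative heading of the end point
```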
7 changes: 2 additions & 5 deletions tools/download_data.py
@@ -70,10 +70,7 @@ def merge_data(addresses, roads, widths):
return road_merged


def run():
#boundary = "270000,7039700,271200,7041000" # gløshaugen
boundary = "267500,7030500,268500,7031500" # sandmoen

def run(boundary: str, output_file: str):
print("Downloading roads")
roads = get_roads(boundary)

@@ -87,4 +84,4 @@ def run():
road_merged = merge_data(addresses, roads, widths)

print("Writing JSON output")
dump_json(road_merged, "veglenkesekvens_sandmoen.json")
dump_json(road_merged, output_file)
