Skip to content

Commit

Permalink
Merge pull request #1013 from PyPSA/store-network-shapes
Browse files Browse the repository at this point in the history
Store network shapes
  • Loading branch information
FabianHofmann authored Apr 17, 2024

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature.
2 parents a835f50 + b993e6a commit 0a19d90
Showing 5 changed files with 104 additions and 29 deletions.
2 changes: 2 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
@@ -184,6 +184,8 @@ Upcoming Release

* Fix custom busmap read in `cluster_network`.

* Added shapes to .nc file for different stages of the network object in `base_network`, `build_bus_regions`, and `cluster_network`.

* Fix p_nom_min of renewables generators for myopic approach and add check of existing capacities in `add_land_use_constraint_m`.

* Add documentation section for how to contribute documentation
19 changes: 19 additions & 0 deletions scripts/base_network.py
Original file line number Diff line number Diff line change
@@ -698,6 +698,22 @@ def _adjust_capacities_of_under_construction_branches(n, config):
return n


def _set_shapes(n, country_shapes, offshore_shapes):
    """
    Read the country and offshore shape files and store them in the
    network's ``Shape`` component.

    Each shape file is loaded, its ``name`` column renamed to ``idx``,
    and tagged with a ``type`` of either 'country' or 'offshore' before
    being added to the network via ``n.madd``.

    Parameters:
    n (pypsa.Network): The network that receives the shapes.
    country_shapes (str): Path to the country shapes file.
    offshore_shapes (str): Path to the offshore shapes file.

    Returns:
    None
    """
    frames = []
    for path, kind in ((country_shapes, "country"), (offshore_shapes, "offshore")):
        gdf = gpd.read_file(path).rename(columns={"name": "idx"})
        gdf["type"] = kind
        frames.append(gdf)
    combined = pd.concat(frames, ignore_index=True)
    n.madd(
        "Shape",
        combined.index,
        geometry=combined.geometry,
        idx=combined.idx,
        # bracket access: "type" would otherwise shadow the DataFrame attribute
        type=combined["type"],
    )


def base_network(
eg_buses,
eg_converters,
@@ -758,11 +774,14 @@ def base_network(

n = _adjust_capacities_of_under_construction_branches(n, config)

_set_shapes(n, country_shapes, offshore_shapes)

return n


if __name__ == "__main__":
if "snakemake" not in globals():

from _helpers import mock_snakemake

snakemake = mock_snakemake("base_network")
48 changes: 41 additions & 7 deletions scripts/build_bus_regions.py
Original file line number Diff line number Diff line change
@@ -109,6 +109,34 @@ def voronoi_partition_pts(points, outline):
return polygons


def append_bus_shapes(n, shapes, type):
    """
    Append bus shapes to the network, replacing existing shapes of the
    same component and type.

    Any entries in ``n.shapes`` with component 'Bus' and the given type are
    removed first; the new shapes are then added with an integer index
    offset so their labels do not collide with the remaining shapes.

    Parameters:
    n (pypsa.Network): The network to which the shapes will be appended.
    shapes (geopandas.GeoDataFrame): The shapes to be appended.
    type (str): Type label stored on the appended shapes (e.g. 'onshore').

    Returns:
    None
    """
    remove = n.shapes.query("component == 'Bus' and type == @type").index
    n.mremove("Shape", remove)

    # Shift the new indices past the current maximum so labels stay unique;
    # assumes existing shape index labels are integer-like — TODO confirm.
    offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0
    shapes = shapes.rename(lambda x: int(x) + offset)
    n.madd(
        "Shape",
        shapes.index,
        geometry=shapes.geometry,
        idx=shapes.name,  # the 'name' column of the GeoDataFrame
        component="Bus",
        type=type,
    )


if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
@@ -119,7 +147,8 @@ def voronoi_partition_pts(points, outline):

countries = snakemake.params.countries

n = pypsa.Network(snakemake.input.base_network)
base_network = snakemake.input.base_network
n = pypsa.Network(base_network)

country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index("name")[
"geometry"
@@ -173,12 +202,17 @@ def voronoi_partition_pts(points, outline):
offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
offshore_regions.append(offshore_regions_c)

pd.concat(onshore_regions, ignore_index=True).to_file(
snakemake.output.regions_onshore
)
shapes = pd.concat(onshore_regions, ignore_index=True)
shapes.to_file(snakemake.output.regions_onshore)
append_bus_shapes(n, shapes, "onshore")

if offshore_regions:
pd.concat(offshore_regions, ignore_index=True).to_file(
snakemake.output.regions_offshore
)
shapes = pd.concat(offshore_regions, ignore_index=True)
shapes.to_file(snakemake.output.regions_offshore)
append_bus_shapes(n, shapes, "offshore")

else:
offshore_shapes.to_frame().to_file(snakemake.output.regions_offshore)

# save network with shapes
n.export_to_netcdf(base_network)
48 changes: 30 additions & 18 deletions scripts/cluster_network.py
Original file line number Diff line number Diff line change
@@ -135,6 +135,7 @@
import seaborn as sns
from _helpers import configure_logging, set_scenario_config, update_p_nom_max
from add_electricity import load_costs
from build_bus_regions import append_bus_shapes
from packaging.version import Version, parse
from pypsa.clustering.spatial import (
busmap_by_greedy_modularity,
@@ -428,20 +429,27 @@ def clustering_for_n_clusters(
return clustering


def cluster_regions(busmaps, regions):
    """
    Cluster regions according to a sequence of busmaps.

    The busmaps are composed by chaining each one through the next via
    ``Series.map``, and the regions are dissolved according to the
    resulting combined busmap.

    Parameters:
    - busmaps (list): Busmaps used for clustering; successive maps are
      chained, so the first maps original buses and each later one maps
      the previous map's targets.
    - regions (geopandas.GeoDataFrame): Regions to cluster; must provide
      'name' and 'geometry' columns.

    Returns:
    geopandas.GeoDataFrame: The dissolved regions, with the cluster
    labels restored as a 'name' column.
    """
    # Compose all busmaps into a single original-bus -> cluster mapping.
    busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
    regions = regions.reindex(columns=["name", "geometry"]).set_index("name")
    regions_c = regions.dissolve(busmap)
    regions_c.index.name = "name"
    return regions_c.reset_index()


def plot_busmap_for_n_clusters(n, n_clusters, fn=None):
busmap = busmap_for_n_clusters(n, n_clusters)
def plot_busmap_for_n_clusters(n, n_clusters, solver_name="scip", fn=None):
busmap = busmap_for_n_clusters(n, n_clusters, solver_name)
cs = busmap.unique()
cr = sns.color_palette("hls", len(cs))
n.plot(bus_colors=busmap.map(dict(zip(cs, cr))))
@@ -538,21 +546,25 @@ def plot_busmap_for_n_clusters(n, n_clusters, fn=None):
params.focus_weights,
)

update_p_nom_max(clustering.network)
nc = clustering.network
update_p_nom_max(nc)

if params.cluster_network.get("consider_efficiency_classes"):
labels = [f" {label} efficiency" for label in ["low", "medium", "high"]]
nc = clustering.network
nc.generators["carrier"] = nc.generators.carrier.replace(labels, "", regex=True)

clustering.network.meta = dict(
snakemake.config, **dict(wildcards=dict(snakemake.wildcards))
)
clustering.network.export_to_netcdf(snakemake.output.network)
for attr in (
"busmap",
"linemap",
): # also available: linemap_positive, linemap_negative
getattr(clustering, attr).to_csv(snakemake.output[attr])

cluster_regions((clustering.busmap,), snakemake.input, snakemake.output)
nc.shapes = n.shapes.copy()
for which in ["regions_onshore", "regions_offshore"]:
regions = gpd.read_file(snakemake.input[which])
clustered_regions = cluster_regions((clustering.busmap,), regions)
clustered_regions.to_file(snakemake.output[which])
append_bus_shapes(nc, clustered_regions, type=which.split("_")[1])

nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
nc.export_to_netcdf(snakemake.output.network)
16 changes: 12 additions & 4 deletions scripts/simplify_network.py
Original file line number Diff line number Diff line change
@@ -88,12 +88,14 @@
import logging
from functools import reduce

import geopandas as gpd
import numpy as np
import pandas as pd
import pypsa
import scipy as sp
from _helpers import configure_logging, set_scenario_config, update_p_nom_max
from add_electricity import load_costs
from build_bus_regions import append_bus_shapes
from cluster_network import cluster_regions, clustering_for_n_clusters
from pypsa.clustering.spatial import (
aggregateoneport,
@@ -610,6 +612,7 @@ def cluster(
n.lines.drop(remove, axis=1, errors="ignore", inplace=True)

if snakemake.wildcards.simpl:
shapes = n.shapes
n, cluster_map = cluster(
n,
int(snakemake.wildcards.simpl),
@@ -619,14 +622,19 @@ def cluster(
params.simplify_network["feature"],
params.aggregation_strategies,
)
n.shapes = shapes
busmaps.append(cluster_map)

update_p_nom_max(n)

n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output.network)

busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
busmap_s.to_csv(snakemake.output.busmap)

cluster_regions(busmaps, snakemake.input, snakemake.output)
for which in ["regions_onshore", "regions_offshore"]:
regions = gpd.read_file(snakemake.input[which])
clustered_regions = cluster_regions(busmaps, regions)
clustered_regions.to_file(snakemake.output[which])
append_bus_shapes(n, clustered_regions, type=which.split("_")[1])

n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output.network)

0 comments on commit 0a19d90

Please sign in to comment.