Skip to content

Commit

Permalink
🚩 Refresh crossover track plotting scripts
Browse files Browse the repository at this point in the history
Main changes are an `elev_filter` setting in `deepicedrain.vizplots.plot_crossovers`, and the addition of annotated track numbers in the crossover_area map plot using GMT's quoted-line functionality. Several smaller changes are included as well: a refresh of the subglacial_lake ID (key) and name (value) pairs, a quick improvement to the inefficient brute-force crossover for-loop (skipping parallel laser pairs on the same reference ground track), and organizing figures inside 'figures/{lakename}' instead of just the 'figures' directory. A large chunk of local code has yet to be committed; the processing still needs to be refactored and streamlined so that many subglacial lakes can be looped through in a more hands-off manner.
  • Loading branch information
weiji14 committed Oct 26, 2020
1 parent 665995a commit a63fad2
Show file tree
Hide file tree
Showing 3 changed files with 116 additions and 53 deletions.
81 changes: 55 additions & 26 deletions atlxi_lake.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -917,11 +917,14 @@
"outputs": [],
"source": [
"# Save or load dhdt data from Parquet file\n",
"placename: str = \"Recovery\" # \"Whillans\"\n",
"drainage_basins: gpd.GeoDataFrame = drainage_basins.set_index(keys=\"NAME\")\n",
"region: deepicedrain.Region = deepicedrain.Region.from_gdf(\n",
" gdf=drainage_basins.loc[placename], name=\"Recovery Basin\"\n",
")\n",
"placename: str = \"siple_coast\" # \"Recovery\" # \"Whillans\"\n",
"try:\n",
" drainage_basins: gpd.GeoDataFrame = drainage_basins.set_index(keys=\"NAME\")\n",
" region: deepicedrain.Region = deepicedrain.Region.from_gdf(\n",
" gdf=drainage_basins.loc[placename], name=\"Recovery Basin\"\n",
" )\n",
"except KeyError:\n",
" pass\n",
"df_dhdt: cudf.DataFrame = cudf.read_parquet(\n",
" f\"ATLXI/df_dhdt_{placename.lower()}.parquet\"\n",
")"
Expand All @@ -935,7 +938,7 @@
"source": [
"# Antarctic subglacial lake polygons with EPSG:3031 coordinates\n",
"antarctic_lakes: gpd.GeoDataFrame = gpd.read_file(\n",
" filename=\"antarctic_subglacial_lakes.geojson\"\n",
" filename=\"antarctic_subglacial_lakes_3031.geojson\"\n",
")"
]
},
Expand All @@ -948,17 +951,28 @@
"outputs": [],
"source": [
"# Choose one draining/filling lake\n",
"draining: bool = False # False\n",
"placename: str = \"Slessor\" # \"Whillans\"\n",
"draining: bool = False\n",
"placename: str = \"Whillans\" # \"Slessor\" # \"Kamb\" # \"Mercer\" #\n",
"lakes: gpd.GeoDataFrame = antarctic_lakes.query(expr=\"basin_name == @placename\")\n",
"lake = lakes.loc[lakes.maxabsdhdt.idxmin() if draining else lakes.maxabsdhdt.idxmax()]\n",
"lake = lakes.loc[lakes.inner_dhdt.idxmin() if draining else lakes.inner_dhdt.idxmax()]\n",
"# lake = lakes.query(expr=\"inner_dhdt < 0\" if draining else \"inner_dhdt > 0\").loc[63]\n",
"lakedict = {\n",
" 76: \"Subglacial Lake Conway\", # draining lake\n",
" 78: \"Whillans IX\", # filling lake\n",
" 103: \"Slessor 45\", # draining lake\n",
" 108: \"Slessor 23\", # filling lake\n",
" 21: \"Mercer 2b\", # filling lake\n",
" 40: \"Subglacial Lake Conway\", # draining lake\n",
" 48: \"Subglacial Lake Whillans\", # filling lake\n",
" 50: \"Whillans IX\", # filling lake\n",
" 63: \"Kamb 1\", # filling lake\n",
" 65: \"Kamb 12\", # filling lake\n",
" 97: \"MacAyeal 1\", # draining lake\n",
" 109: \"Slessor 45\", # draining lake\n",
" 116: \"Slessor 23\", # filling lake\n",
" 153: \"Recovery IX\", # draining lake\n",
" 157: \"Recovery 3\", # filling lake\n",
"}\n",
"region = deepicedrain.Region.from_gdf(gdf=lake, name=lakedict[lake.name])"
"region = deepicedrain.Region.from_gdf(gdf=lake, name=lakedict[lake.name])\n",
"\n",
"print(lake)\n",
"lake.geometry"
]
},
{
Expand Down Expand Up @@ -1064,6 +1078,10 @@
"# Parallelized paired crossover analysis\n",
"futures: list = []\n",
"for rgt1, rgt2 in itertools.combinations(rgts, r=2):\n",
" # skip if same referencegroundtrack but different laser pair\n",
" # as they are parallel and won't cross\n",
" if rgt1[:4] == rgt2[:4]:\n",
" continue\n",
" track1 = track_dict[rgt1][[\"x\", \"y\", \"h_corr\", \"utc_time\"]]\n",
" track2 = track_dict[rgt2][[\"x\", \"y\", \"h_corr\", \"utc_time\"]]\n",
" future = client.submit(\n",
Expand Down Expand Up @@ -1161,14 +1179,20 @@
"pygmt.makecpt(cmap=\"batlow\", series=[sumstats[var][\"25%\"], sumstats[var][\"75%\"]])\n",
"# Map frame in metre units\n",
"fig.basemap(frame=\"f\", region=plotregion, projection=\"X8c\")\n",
"# Plot actual track points\n",
"# Plot actual track points in green\n",
"for track in tracks:\n",
" fig.plot(x=track.x, y=track.y, color=\"green\", style=\"c0.01c\")\n",
" tracklabel = f\"{track.iloc[0].referencegroundtrack} {track.iloc[0].pairtrack}\"\n",
" fig.plot(\n",
" x=track.x,\n",
" y=track.y,\n",
" pen=\"thinnest,green,.\",\n",
" style=f'qN+1:+l\"{tracklabel}\"+f3p,Helvetica,darkgreen',\n",
" )\n",
"# Plot crossover point locations\n",
"fig.plot(x=df.x, y=df.y, color=df.h_X, cmap=True, style=\"c0.1c\", pen=\"thinnest\")\n",
"# PLot lake boundary\n",
"# Plot lake boundary in blue\n",
"lakex, lakey = lake.geometry.exterior.coords.xy\n",
"fig.plot(x=lakex, y=lakey, pen=\"thin\")\n",
"fig.plot(x=lakex, y=lakey, pen=\"thin,blue,-.\")\n",
"# Map frame in kilometre units\n",
"fig.basemap(\n",
" frame=[\n",
Expand All @@ -1180,7 +1204,7 @@
" projection=\"X8c\",\n",
")\n",
"fig.colorbar(position=\"JMR\", frame=['x+l\"Crossover Error\"', \"y+lm\"])\n",
"fig.savefig(f\"figures/crossover_area_{placename}.png\")\n",
"fig.savefig(f\"figures/{placename}/crossover_area_{placename}_{min_date}_{max_date}.png\")\n",
"fig.show()"
]
},
Expand Down Expand Up @@ -1252,7 +1276,7 @@
"# Plot dashed line connecting points\n",
"fig.plot(x=df_max.t, y=df_max.h, pen=f\"faint,blue,-\")\n",
"fig.savefig(\n",
" f\"figures/crossover_point_{placename}_{track1}_{track2}_{min_date}_{max_date}.png\"\n",
" f\"figures/{placename}/crossover_point_{placename}_{track1}_{track2}_{min_date}_{max_date}.png\"\n",
")\n",
"fig.show()"
]
Expand All @@ -1264,8 +1288,8 @@
"outputs": [],
"source": [
"# Plot all crossover height points over time over the lake area\n",
"fig = deepicedrain.vizplots.plot_crossovers(df=df_th, regionname=region.name)\n",
"fig.savefig(f\"figures/crossover_many_{placename}_{min_date}_{max_date}.png\")\n",
"fig = deepicedrain.plot_crossovers(df=df_th, regionname=region.name)\n",
"fig.savefig(f\"figures/{placename}/crossover_many_{placename}_{min_date}_{max_date}.png\")\n",
"fig.show()"
]
},
Expand All @@ -1280,10 +1304,15 @@
"normfunc = lambda h: h - h.iloc[0] # lambda h: h - h.mean()\n",
"df_th[\"h_norm\"] = df_th.groupby(by=\"track1_track2\").h.transform(func=normfunc)\n",
"\n",
"fig = deepicedrain.vizplots.plot_crossovers(\n",
" df=df_th, regionname=region.name, elev_var=\"h_norm\"\n",
"fig = deepicedrain.plot_crossovers(\n",
" df=df_th,\n",
" regionname=region.name,\n",
" elev_var=\"h_norm\",\n",
" elev_filter=3 * abs(df.h_X).median(),\n",
")\n",
"fig.savefig(\n",
" f\"figures/{placename}/crossover_many_normalized_{placename}_{min_date}_{max_date}.png\"\n",
")\n",
"fig.savefig(f\"figures/crossover_many_normalized_{placename}_{min_date}_{max_date}.png\")\n",
"fig.show()"
]
},
Expand Down Expand Up @@ -1315,7 +1344,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.3"
"version": "3.8.6"
}
},
"nbformat": 4,
Expand Down
77 changes: 53 additions & 24 deletions atlxi_lake.py
Original file line number Diff line number Diff line change
Expand Up @@ -343,11 +343,14 @@ def find_clusters(X: cudf.core.dataframe.DataFrame) -> cudf.core.series.Series:

# %%
# Save or load dhdt data from Parquet file
placename: str = "Recovery" # "Whillans"
drainage_basins: gpd.GeoDataFrame = drainage_basins.set_index(keys="NAME")
region: deepicedrain.Region = deepicedrain.Region.from_gdf(
gdf=drainage_basins.loc[placename], name="Recovery Basin"
)
placename: str = "siple_coast" # "Recovery" # "Whillans"
try:
drainage_basins: gpd.GeoDataFrame = drainage_basins.set_index(keys="NAME")
region: deepicedrain.Region = deepicedrain.Region.from_gdf(
gdf=drainage_basins.loc[placename], name="Recovery Basin"
)
except KeyError:
pass
df_dhdt: cudf.DataFrame = cudf.read_parquet(
f"ATLXI/df_dhdt_{placename.lower()}.parquet"
)
Expand All @@ -356,23 +359,34 @@ def find_clusters(X: cudf.core.dataframe.DataFrame) -> cudf.core.series.Series:
# %%
# Antarctic subglacial lake polygons with EPSG:3031 coordinates
antarctic_lakes: gpd.GeoDataFrame = gpd.read_file(
filename="antarctic_subglacial_lakes.geojson"
filename="antarctic_subglacial_lakes_3031.geojson"
)

# %%
# Choose one draining/filling lake
draining: bool = False # False
placename: str = "Slessor" # "Whillans"
draining: bool = False
placename: str = "Whillans" # "Slessor" # "Kamb" # "Mercer" #
lakes: gpd.GeoDataFrame = antarctic_lakes.query(expr="basin_name == @placename")
lake = lakes.loc[lakes.maxabsdhdt.idxmin() if draining else lakes.maxabsdhdt.idxmax()]
lake = lakes.loc[lakes.inner_dhdt.idxmin() if draining else lakes.inner_dhdt.idxmax()]
# lake = lakes.query(expr="inner_dhdt < 0" if draining else "inner_dhdt > 0").loc[63]
lakedict = {
76: "Subglacial Lake Conway", # draining lake
78: "Whillans IX", # filling lake
103: "Slessor 45", # draining lake
108: "Slessor 23", # filling lake
21: "Mercer 2b", # filling lake
40: "Subglacial Lake Conway", # draining lake
48: "Subglacial Lake Whillans", # filling lake
50: "Whillans IX", # filling lake
63: "Kamb 1", # filling lake
65: "Kamb 12", # filling lake
97: "MacAyeal 1", # draining lake
109: "Slessor 45", # draining lake
116: "Slessor 23", # filling lake
153: "Recovery IX", # draining lake
157: "Recovery 3", # filling lake
}
region = deepicedrain.Region.from_gdf(gdf=lake, name=lakedict[lake.name])

print(lake)
lake.geometry

# %%
# Subset data to lake of interest
placename: str = region.name.lower().replace(" ", "_")
Expand Down Expand Up @@ -445,6 +459,10 @@ def find_clusters(X: cudf.core.dataframe.DataFrame) -> cudf.core.series.Series:
# Parallelized paired crossover analysis
futures: list = []
for rgt1, rgt2 in itertools.combinations(rgts, r=2):
# skip if same referencegroundtrack but different laser pair
# as they are parallel and won't cross
if rgt1[:4] == rgt2[:4]:
continue
track1 = track_dict[rgt1][["x", "y", "h_corr", "utc_time"]]
track2 = track_dict[rgt2][["x", "y", "h_corr", "utc_time"]]
future = client.submit(
Expand Down Expand Up @@ -511,14 +529,20 @@ def find_clusters(X: cudf.core.dataframe.DataFrame) -> cudf.core.series.Series:
pygmt.makecpt(cmap="batlow", series=[sumstats[var]["25%"], sumstats[var]["75%"]])
# Map frame in metre units
fig.basemap(frame="f", region=plotregion, projection="X8c")
# Plot actual track points
# Plot actual track points in green
for track in tracks:
fig.plot(x=track.x, y=track.y, color="green", style="c0.01c")
tracklabel = f"{track.iloc[0].referencegroundtrack} {track.iloc[0].pairtrack}"
fig.plot(
x=track.x,
y=track.y,
pen="thinnest,green,.",
style=f'qN+1:+l"{tracklabel}"+f3p,Helvetica,darkgreen',
)
# Plot crossover point locations
fig.plot(x=df.x, y=df.y, color=df.h_X, cmap=True, style="c0.1c", pen="thinnest")
# PLot lake boundary
# Plot lake boundary in blue
lakex, lakey = lake.geometry.exterior.coords.xy
fig.plot(x=lakex, y=lakey, pen="thin")
fig.plot(x=lakex, y=lakey, pen="thin,blue,-.")
# Map frame in kilometre units
fig.basemap(
frame=[
Expand All @@ -530,7 +554,7 @@ def find_clusters(X: cudf.core.dataframe.DataFrame) -> cudf.core.series.Series:
projection="X8c",
)
fig.colorbar(position="JMR", frame=['x+l"Crossover Error"', "y+lm"])
fig.savefig(f"figures/crossover_area_{placename}.png")
fig.savefig(f"figures/{placename}/crossover_area_{placename}_{min_date}_{max_date}.png")
fig.show()


Expand Down Expand Up @@ -587,14 +611,14 @@ def find_clusters(X: cudf.core.dataframe.DataFrame) -> cudf.core.series.Series:
# Plot dashed line connecting points
fig.plot(x=df_max.t, y=df_max.h, pen=f"faint,blue,-")
fig.savefig(
f"figures/crossover_point_{placename}_{track1}_{track2}_{min_date}_{max_date}.png"
f"figures/{placename}/crossover_point_{placename}_{track1}_{track2}_{min_date}_{max_date}.png"
)
fig.show()

# %%
# Plot all crossover height points over time over the lake area
fig = deepicedrain.vizplots.plot_crossovers(df=df_th, regionname=region.name)
fig.savefig(f"figures/crossover_many_{placename}_{min_date}_{max_date}.png")
fig = deepicedrain.plot_crossovers(df=df_th, regionname=region.name)
fig.savefig(f"figures/{placename}/crossover_many_{placename}_{min_date}_{max_date}.png")
fig.show()

# %%
Expand All @@ -603,10 +627,15 @@ def find_clusters(X: cudf.core.dataframe.DataFrame) -> cudf.core.series.Series:
normfunc = lambda h: h - h.iloc[0] # lambda h: h - h.mean()
df_th["h_norm"] = df_th.groupby(by="track1_track2").h.transform(func=normfunc)

fig = deepicedrain.vizplots.plot_crossovers(
df=df_th, regionname=region.name, elev_var="h_norm"
fig = deepicedrain.plot_crossovers(
df=df_th,
regionname=region.name,
elev_var="h_norm",
elev_filter=3 * abs(df.h_X).median(),
)
fig.savefig(
f"figures/{placename}/crossover_many_normalized_{placename}_{min_date}_{max_date}.png"
)
fig.savefig(f"figures/crossover_many_normalized_{placename}_{min_date}_{max_date}.png")
fig.show()

# %%
11 changes: 8 additions & 3 deletions deepicedrain/vizplots.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ class IceSat2Explorer(param.Parameterized):
plot_variable = param.Selector(
default="dhdt_slope", objects=["referencegroundtrack", "dhdt_slope", "h_corr"]
)
cycle_number = param.Integer(default=7, bounds=(2, 7))
cycle_number = param.Integer(default=7, bounds=(2, 8))
dhdt_range = param.Range(default=(1.0, 10.0), bounds=(0.0, 20.0))
rasterize = param.Boolean(default=True)
datashade = param.Boolean(default=False)
Expand Down Expand Up @@ -271,6 +271,7 @@ def plot_crossovers(
time_var: str = "t",
track_var: str = "track1_track2",
spacing: float = 2.5,
elev_filter: float = 1.0,
) -> pygmt.Figure:
"""
Plot to show how elevation is changing at many crossover points over time.
Expand Down Expand Up @@ -316,6 +317,9 @@ def plot_crossovers(
Provide as a 'dy' increment, this is passed on to `pygmt.info` and used
to round up and down the y axis (elev_var) limits for a nicer plot
frame. Default is 2.5.
elev_filter : float
Minimum elevation change required for the crossover point to show up
on the plot. Default is 1.0 (metres).
Returns
-------
Expand Down Expand Up @@ -359,7 +363,7 @@ def plot_crossovers(
):
df_ = df.loc[indexes].sort_values(by=time_var)
# plot only > 1 metre height change
if df_[elev_var].max() - df_[elev_var].min() > 1.0:
if df_[elev_var].max() - df_[elev_var].min() > elev_filter:
track1, track2 = track1_track2.split("x")
fig.plot(
x=df_[time_var],
Expand All @@ -374,5 +378,6 @@ def plot_crossovers(
fig.plot(
x=df_[time_var], y=df_[elev_var], Z=i, pen=f"faint,+z,-", cmap=True
)
fig.legend(S=0.5, position="JMR+JMR+o0.2c", box="+gwhite+p1p")
with pygmt.config(FONT_ANNOT_PRIMARY="9p"):
fig.legend(S=0.8, position="JTR+jTL+o0.2c", box="+gwhite+p1p")
return fig

0 comments on commit a63fad2

Please sign in to comment.