Skip to content

Commit

Permalink
✨ Active subglacial lake mega-cluster analysis
Browse files Browse the repository at this point in the history
Look at not one but multiple active subglacial lakes! Examples include: Lake 78, Subglacial Lake Conway, Subglacial Lake Mercer, and Subglacial Lake Whillans on the Siple Coast; and Recovery 2 on Recovery Glacier. Also renumbered all of the lake IDs for the single lakes. The subglacial_lakes.feature file splits the single and mega-cluster lake tests. Note that the mega-cluster test won't run on CI until deepicedrain v0.4.0 is released.

This really required a lot of refactoring to handle multiple lake polygons. The bounding box region is now determined using a convex hull so that it works for either a single polygon or multiple polygons. Lakes are dissolved into a MultiPolygon (geopandas v0.8.0 requires a dissolve field, which is problematic for Lake 78 since it spans the Whillans and Mercer basins). The lake outlines are saved to the OGR GMT format, which is actually quite nice as it can be shared around.
  • Loading branch information
weiji14 committed Dec 5, 2020
1 parent 1dac092 commit c93eded
Show file tree
Hide file tree
Showing 7 changed files with 163 additions and 96 deletions.
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,8 @@ ATL11.001
ATL11.001z123
ATLXI
Quantarctica3

# Subglacial Lake grid files and figures
figures/**/*.gif
figures/**/*.nc
figures/**/*.png
62 changes: 34 additions & 28 deletions atlxi_lake.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -922,7 +922,7 @@
"outputs": [],
"source": [
"# Save or load dhdt data from Parquet file\n",
"placename: str = \"siple_coast\" # \"Recovery\" # \"Whillans\"\n",
"placename: str = \"siple_coast\" # \"slessor_downstream\" # \"Recovery\" # \"Whillans\"\n",
"try:\n",
" drainage_basins: gpd.GeoDataFrame = drainage_basins.set_index(keys=\"NAME\")\n",
" region: deepicedrain.Region = deepicedrain.Region.from_gdf(\n",
Expand All @@ -942,7 +942,8 @@
"outputs": [],
"source": [
"# Antarctic subglacial lake polygons with EPSG:3031 coordinates\n",
"antarctic_lakes: gpd.GeoDataFrame = deepicedrain.catalog.subglacial_lakes.read()"
"antarctic_lakes: gpd.GeoDataFrame = deepicedrain.catalog.subglacial_lakes.read()\n",
"antarctic_lakes = antarctic_lakes.set_crs(epsg=3031, allow_override=True)"
]
},
{
Expand Down Expand Up @@ -986,24 +987,30 @@
"# Choose one draining/filling lake\n",
"draining: bool = False\n",
"placename: str = \"Whillans\" # \"Slessor\" # \"Kamb\" # \"Mercer\" #\n",
"lakes: gpd.GeoDataFrame = antarctic_lakes.query(expr=\"basin_name == @placename\")\n",
"lake = lakes.loc[lakes.inner_dhdt.idxmin() if draining else lakes.inner_dhdt.idxmax()]\n",
"lake = lakes.query(expr=\"inner_dhdt < 0\" if draining else \"inner_dhdt > 0\").loc[48]\n",
"lakes: gpd.GeoDataFrame = antarctic_lakes # .query(expr=\"basin_name == @placename\")\n",
"\n",
"lake_ids: int = (44,) # single lake\n",
"lake_ids: tuple = (41, 43, 45) # lake mega-cluster\n",
"# TODO handle Lake 78 cross-basin by using dissolve(by=None) available\n",
"# in geopandas v0.9.0 https://github.com/geopandas/geopandas/pull/1568\n",
"lake = lakes.loc[list(lake_ids)].dissolve(by=\"basin_name\", as_index=False).squeeze()\n",
"lakedict = {\n",
" 21: \"Mercer 2b\", # filling lake\n",
" 40: \"Lower Subglacial Lake Conway\", # draining lake\n",
" 41: \"Subglacial Lake Conway\", # draining lake\n",
" 48: \"Subglacial Lake Whillans\", # filling lake\n",
" 50: \"Whillans IX\", # filling lake\n",
" 63: \"Kamb 1\", # filling lake\n",
" 65: \"Kamb 12\", # filling lake\n",
" 97: \"MacAyeal 1\", # draining lake\n",
" 109: \"Slessor 45\", # draining lake\n",
" 116: \"Slessor 23\", # filling lake\n",
" 151: \"Recovery IV\", # draining lake\n",
" 156: \"Recovery 2\", # filling lake\n",
" (15, 19): \"Subglacial Lake Mercer\", # filling lake\n",
" (32,): \"Whillans 7\", # draining lake\n",
" (34, 35): \"Subglacial Lake Conway\", # draining lake\n",
" (41, 43, 45): \"Subglacial Lake Whillans\", # filling lake\n",
" (16, 46, 48): \"Lake 78\", # filling lake\n",
" (44,): \"Whillans IX\", # filling lake\n",
" (62,): \"Kamb 1\", # filling lake\n",
" # (65): \"Kamb 12\", # filling lake\n",
" (84,): \"MacAyeal 1\", # draining lake\n",
" (95,): \"Slessor 45\", # draining lake\n",
" (101,): \"Slessor 23\", # filling lake\n",
" (141,): \"Recovery IV\", # draining lake\n",
" (143, 144): \"Recovery 2\", # filling lake\n",
"}\n",
"region = deepicedrain.Region.from_gdf(gdf=lake, name=lakedict[lake.name])\n",
"region = deepicedrain.Region.from_gdf(gdf=lake, name=lakedict[lake_ids])\n",
"assert (lake.inner_dhdt < 0 and draining) or (lake.inner_dhdt > 0 and not draining)\n",
"\n",
"print(lake)\n",
"lake.geometry"
Expand All @@ -1019,7 +1026,12 @@
"source": [
"# Subset data to lake of interest\n",
"placename: str = region.name.lower().replace(\" \", \"_\")\n",
"df_lake: cudf.DataFrame = region.subset(data=df_dhdt)"
"df_lake: cudf.DataFrame = region.subset(data=df_dhdt)\n",
"\n",
"# Save lake outline to OGR GMT file format\n",
"outline_points: str = f\"figures/{placename}/{placename}.gmt\"\n",
"if not os.path.exists(path=outline_points):\n",
" lakes.loc[list(lake_ids)].to_file(filename=outline_points, driver=\"OGR_GMT\")"
]
},
{
Expand All @@ -1044,7 +1056,7 @@
],
"source": [
"# Generate gridded time-series of ice elevation over lake\n",
"cycles: tuple = (3, 4, 5, 6, 7, 8)\n",
"cycles: tuple = (3, 4, 5, 6, 7, 8, 9)\n",
"os.makedirs(name=f\"figures/{placename}\", exist_ok=True)\n",
"ds_lake: xr.Dataset = deepicedrain.spatiotemporal_cube(\n",
" table=df_lake.to_pandas(),\n",
Expand Down Expand Up @@ -1092,15 +1104,9 @@
" time_nsec: pd.Timestamp = df_lake[f\"utc_time_{cycle}\"].to_pandas().mean()\n",
" time_sec: str = np.datetime_as_string(arr=time_nsec.to_datetime64(), unit=\"s\")\n",
"\n",
" grid = f\"figures/{placename}/h_corr_{placename}_cycle_{cycle}.nc\"\n",
" points = pd.DataFrame(\n",
" data=np.vstack(lake.geometry.boundary.coords.xy).T, columns=(\"x\", \"y\")\n",
" )\n",
" outline_points = pygmt.grdtrack(points=points, grid=grid, newcolname=\"z\")\n",
" outline_points[\"z\"] = outline_points.z.fillna(value=outline_points.z.median())\n",
"\n",
" # grid = ds_lake.sel(cycle_number=cycle).z\n",
" fig = deepicedrain.plot_icesurface(\n",
" grid=grid, # ds_lake.sel(cycle_number=cycle).z\n",
" grid=f\"figures/{placename}/h_corr_{placename}_cycle_{cycle}.nc\",\n",
" grid_region=grid_region,\n",
" diff_grid=ds_lake_diff.sel(cycle_number=cycle).z,\n",
" diff_grid_region=diff_grid_region,\n",
Expand Down
57 changes: 31 additions & 26 deletions atlxi_lake.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@

# %%
# Save or load dhdt data from Parquet file
placename: str = "siple_coast" # "Recovery" # "Whillans"
placename: str = "siple_coast" # "slessor_downstream" # "Recovery" # "Whillans"
try:
drainage_basins: gpd.GeoDataFrame = drainage_basins.set_index(keys="NAME")
region: deepicedrain.Region = deepicedrain.Region.from_gdf(
Expand All @@ -347,29 +347,36 @@
# %%
# Antarctic subglacial lake polygons with EPSG:3031 coordinates
antarctic_lakes: gpd.GeoDataFrame = deepicedrain.catalog.subglacial_lakes.read()
antarctic_lakes = antarctic_lakes.set_crs(epsg=3031, allow_override=True)

# %%
# Choose one draining/filling lake
draining: bool = False
placename: str = "Whillans" # "Slessor" # "Kamb" # "Mercer" #
lakes: gpd.GeoDataFrame = antarctic_lakes.query(expr="basin_name == @placename")
lake = lakes.loc[lakes.inner_dhdt.idxmin() if draining else lakes.inner_dhdt.idxmax()]
lake = lakes.query(expr="inner_dhdt < 0" if draining else "inner_dhdt > 0").loc[48]
lakes: gpd.GeoDataFrame = antarctic_lakes # .query(expr="basin_name == @placename")

lake_ids: int = (44,) # single lake
lake_ids: tuple = (41, 43, 45) # lake mega-cluster
# TODO handle Lake 78 cross-basin by using dissolve(by=None) available
# in geopandas v0.9.0 https://github.com/geopandas/geopandas/pull/1568
lake = lakes.loc[list(lake_ids)].dissolve(by="basin_name", as_index=False).squeeze()
lakedict = {
21: "Mercer 2b", # filling lake
40: "Lower Subglacial Lake Conway", # draining lake
41: "Subglacial Lake Conway", # draining lake
48: "Subglacial Lake Whillans", # filling lake
50: "Whillans IX", # filling lake
63: "Kamb 1", # filling lake
65: "Kamb 12", # filling lake
97: "MacAyeal 1", # draining lake
109: "Slessor 45", # draining lake
116: "Slessor 23", # filling lake
151: "Recovery IV", # draining lake
156: "Recovery 2", # filling lake
(15, 19): "Subglacial Lake Mercer", # filling lake
(32,): "Whillans 7", # draining lake
(34, 35): "Subglacial Lake Conway", # draining lake
(41, 43, 45): "Subglacial Lake Whillans", # filling lake
(16, 46, 48): "Lake 78", # filling lake
(44,): "Whillans IX", # filling lake
(62,): "Kamb 1", # filling lake
# (65): "Kamb 12", # filling lake
(84,): "MacAyeal 1", # draining lake
(95,): "Slessor 45", # draining lake
(101,): "Slessor 23", # filling lake
(141,): "Recovery IV", # draining lake
(143, 144): "Recovery 2", # filling lake
}
region = deepicedrain.Region.from_gdf(gdf=lake, name=lakedict[lake.name])
region = deepicedrain.Region.from_gdf(gdf=lake, name=lakedict[lake_ids])
assert (lake.inner_dhdt < 0 and draining) or (lake.inner_dhdt > 0 and not draining)

print(lake)
lake.geometry
Expand All @@ -379,13 +386,17 @@
placename: str = region.name.lower().replace(" ", "_")
df_lake: cudf.DataFrame = region.subset(data=df_dhdt)

# Save lake outline to OGR GMT file format
outline_points: str = f"figures/{placename}/{placename}.gmt"
if not os.path.exists(path=outline_points):
lakes.loc[list(lake_ids)].to_file(filename=outline_points, driver="OGR_GMT")

# %% [markdown]
# ## Create an interpolated ice surface elevation grid for each ICESat-2 cycle

# %%
# Generate gridded time-series of ice elevation over lake
cycles: tuple = (3, 4, 5, 6, 7, 8)
cycles: tuple = (3, 4, 5, 6, 7, 8, 9)
os.makedirs(name=f"figures/{placename}", exist_ok=True)
ds_lake: xr.Dataset = deepicedrain.spatiotemporal_cube(
table=df_lake.to_pandas(),
Expand Down Expand Up @@ -413,15 +424,9 @@
time_nsec: pd.Timestamp = df_lake[f"utc_time_{cycle}"].to_pandas().mean()
time_sec: str = np.datetime_as_string(arr=time_nsec.to_datetime64(), unit="s")

grid = f"figures/{placename}/h_corr_{placename}_cycle_{cycle}.nc"
points = pd.DataFrame(
data=np.vstack(lake.geometry.boundary.coords.xy).T, columns=("x", "y")
)
outline_points = pygmt.grdtrack(points=points, grid=grid, newcolname="z")
outline_points["z"] = outline_points.z.fillna(value=outline_points.z.median())

# grid = ds_lake.sel(cycle_number=cycle).z
fig = deepicedrain.plot_icesurface(
grid=grid, # ds_lake.sel(cycle_number=cycle).z
grid=f"figures/{placename}/h_corr_{placename}_cycle_{cycle}.nc",
grid_region=grid_region,
diff_grid=ds_lake_diff.sel(cycle_number=cycle).z,
diff_grid_region=diff_grid_region,
Expand Down
47 changes: 31 additions & 16 deletions deepicedrain/features/subglacial_lakes.feature
Original file line number Diff line number Diff line change
Expand Up @@ -15,24 +15,39 @@ Feature: Mapping Antarctic subglacial lakes


Scenario Outline: Subglacial Lake Animation
Given some altimetry data at <location> spatially subsetted to <lake_name> with <lake_id>
Given some altimetry data at <location> spatially subsetted to <lake_name> with <lake_ids>
When it is turned into a spatiotemporal cube over ICESat-2 cycles <cycles>
And visualized at each cycle using a 3D perspective at <azimuth> and <elevation>
Then the result is an animation of ice surface elevation changing over time

Examples:
| location | lake_name | lake_id | cycles | azimuth | elevation |
# | whillans_downstream | Mercer 2b | 21 | 3-8 | 157.5 | 45 |
# | whillans_downstream | Lower Subglacial Lake Conway | 40 | 3-8 | 157.5 | 45 |
# | whillans_downstream | Subglacial Lake Conway | 41 | 3-8 | 157.5 | 45 |
# | whillans_downstream | Subglacial Lake Whillans | 48 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Whillans IX | 50 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Kamb 8 | 61 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Kamb 1 | 62 | 3-8 | 157.5 | 45 |
| whillans_upstream | Kamb 34 | 63 | 4-7 | 157.5 | 45 |
# | siple_coast | Kamb 12 | 65 | 3-8 | 157.5 | 45 |
# | siple_coast | MacAyeal 1 | 97 | 3-8 | 157.5 | 60 |
# | slessor_downstream | Slessor 45 | 109 | 3-8 | 202.5 | 60 |
# | slessor_downstream | Slessor 23 | 116 | 3-8 | 202.5 | 60 |
| slessor_downstream | Recovery IV | 141 | 3-8 | 247.5 | 45 |
# | slessor_downstream | Recovery 2 | 156 | 3-8 | 202.5 | 45 |
| location | lake_name | lake_ids | cycles | azimuth | elevation |
# | whillans_downstream | Mercer XV | 17 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Whillans 7 | 32 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Whillans 6 | 33 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Whillans X | 38 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Whillans XI | 49 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Whillans IX | 44 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Kamb 8 | 61 | 3-8 | 157.5 | 45 |
# | whillans_upstream | Kamb 1 | 62 | 3-8 | 157.5 | 45 |
| whillans_upstream | Kamb 34 | 63 | 4-7 | 157.5 | 45 |
# | siple_coast | Kamb 12 | 65 | 3-8 | 157.5 | 45 |
# | siple_coast | MacAyeal 1 | 84 | 3-8 | 157.5 | 60 |
# | slessor_downstream | Slessor 45 | 95 | 3-8 | 202.5 | 60 |
# | slessor_downstream | Slessor 23 | 101 | 3-8 | 202.5 | 60 |
| slessor_downstream | Recovery IV | 141 | 3-8 | 247.5 | 45 |


Scenario Outline: Subglacial Lake Mega-Cluster Animation
Given some altimetry data at <location> spatially subsetted to <lake_name> with <lake_ids>
When it is turned into a spatiotemporal cube over ICESat-2 cycles <cycles>
And visualized at each cycle using a 3D perspective at <azimuth> and <elevation>
Then the result is an animation of ice surface elevation changing over time

Examples:
| location | lake_name | lake_ids | cycles | azimuth | elevation |
# | whillans_downstream | Lake 78 | 16,46,48 | 3-8 | 157.5 | 45 |
# | whillans_downstream | Subglacial Lake Conway | 34,35 | 3-8 | 157.5 | 45 |
| whillans_downstream | Subglacial Lake Mercer | 15,19 | 3-8 | 157.5 | 45 |
# | whillans_downstream | Subglacial Lake Whillans | 41,43,45 | 3-8 | 157.5 | 45 |
# | slessor_downstream | Recovery 2 | 143,144 | 3-8 | 202.5 | 45 |
2 changes: 1 addition & 1 deletion deepicedrain/spatiotemporal.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def from_gdf(
import pygmt

xmin, xmax, ymin, ymax = pygmt.info(
table=np.vstack(gdf.geometry.exterior.coords.xy).T,
table=np.vstack(gdf.geometry.convex_hull.exterior.coords.xy).T,
spacing=float(spacing),
)
except (ImportError, TypeError):
Expand Down
Loading

0 comments on commit c93eded

Please sign in to comment.