Merge branch 'master' into new-grid-interpolation
johnomotani committed Oct 13, 2022
2 parents 27f30b1 + a23da81 commit 0f0fd09
Showing 14 changed files with 529 additions and 62 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/pythonpackage.yml
@@ -55,7 +55,7 @@ jobs:
matrix:
python-version: [3.7, 3.8]
pip-packages:
- "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
- "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
fail-fast: false

steps:
2 changes: 1 addition & 1 deletion .github/workflows/pythonpublish.yml
@@ -49,7 +49,7 @@ jobs:
matrix:
python-version: [3.7, 3.8]
pip-packages:
- "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
- "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
fail-fast: true

steps:
2 changes: 1 addition & 1 deletion README.md
@@ -1,7 +1,7 @@
# xBOUT

[![Build Status](https://github.com/boutproject/xBOUT/workflows/master/badge.svg)](https://github.com/boutproject/xBOUT/actions)
[![codecov](https://codecov.io/gh/boutproject/xBOUT/branch/master/graph/badge.svg)](https://codecov.io/gh/boutproject/xBOUT)
[![codecov](https://codecov.io/gh/boutproject/xBOUT/branch/master/graph/badge.svg)](https://codecov.io/gh/boutproject/xBOUT) [![Documentation Status](https://readthedocs.org/projects/xbout/badge/?version=latest)](https://xbout.readthedocs.io/en/latest/?badge=latest) [![DOI](https://zenodo.org/badge/160846663.svg)](https://zenodo.org/badge/latestdoi/160846663)

Documentation: https://xbout.readthedocs.io

48 changes: 48 additions & 0 deletions xbout/boutdataarray.py
@@ -17,6 +17,7 @@
from .plotting.utils import _create_norm
from .region import _from_region
from .utils import (
_add_cartesian_coordinates,
_make_1d_xcoord,
_update_metadata_increased_x_resolution,
_update_metadata_increased_y_resolution,
@@ -823,6 +824,17 @@ def interpolate_to_new_grid(
result = apply_geometry(result, self.data.geometry)
return result[self.data.name]

def add_cartesian_coordinates(self):
"""
Add Cartesian (X,Y,Z) coordinates.

Returns
-------
DataArray with new coordinates added, which are named 'X_cartesian',
'Y_cartesian', and 'Z_cartesian'
"""
return _add_cartesian_coordinates(self.data)

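# A minimal sketch of what the `_add_cartesian_coordinates` helper used above
# is expected to compute, assuming cylindrical coordinates R, Z and toroidal
# angle zeta are available (illustrative; not necessarily the exact
# implementation in xbout.utils):
#
#     zeta = da[da.metadata["bout_zdim"]]
#     da = da.assign_coords(
#         X_cartesian=da["R"] * np.cos(zeta),
#         Y_cartesian=da["R"] * np.sin(zeta),
#         Z_cartesian=da["Z"],
#     )
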
def remove_yboundaries(self, return_dataset=False, remove_extra_upper=False):
"""
Remove y-boundary points, if present, from the DataArray
@@ -1443,6 +1455,42 @@ def interpolate_from_unstructured(

return result

def interpolate_to_cartesian(self, *args, **kwargs):
"""
Interpolate the DataArray to a regular Cartesian grid.

This method is intended to be used to produce data for visualisation, which
normally does not require double-precision values, so by default the data is
converted to `np.float32`. Pass `use_float32=False` to retain the original
precision.

Parameters
----------
nX : int (default 300)
Number of grid points in the X direction
nY : int (default 300)
Number of grid points in the Y direction
nZ : int (default 100)
Number of grid points in the Z direction
use_float32 : bool (default True)
Downgrade precision to `np.float32`?
fill_value : float (default np.nan)
Value to use for points outside the interpolation domain (passed to
`scipy.interpolate.RegularGridInterpolator`)

See Also
--------
BoutDataset.interpolate_to_cartesian
"""
da = self.data
name = da.name
ds = da.to_dataset()
# Dataset needs geometry and metadata attributes, but these are not copied from
# the DataArray by default
ds.attrs["geometry"] = da.geometry
ds.attrs["metadata"] = da.metadata
return ds.bout.interpolate_to_cartesian(*args, **kwargs)[name]

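# Example usage of the wrapper above (hypothetical file and variable names):
#
#     ds = open_boutdataset("BOUT.dmp.*.nc")
#     n_cart = ds["n"].bout.interpolate_to_cartesian(nX=200, nY=200, nZ=80)
#
# n_cart then has regular X, Y and Z dimensions, with `fill_value` at points
# outside the interpolation domain.
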
# BOUT-specific plotting functionality: methods that plot on a poloidal (R-Z) plane
def contour(self, ax=None, **kwargs):
"""
183 changes: 181 additions & 2 deletions xbout/boutdataset.py
@@ -28,7 +28,11 @@
_parse_coord_option,
)
from .region import _from_region
from .utils import _get_bounding_surfaces, _split_into_restarts
from .utils import (
_add_cartesian_coordinates,
_get_bounding_surfaces,
_split_into_restarts,
)


@xr.register_dataset_accessor("bout")
@@ -746,6 +750,171 @@ def interpolate_from_unstructured(

return ds

def interpolate_to_cartesian(
self, nX=300, nY=300, nZ=100, *, use_float32=True, fill_value=np.nan
):
"""
Interpolate the Dataset to a regular Cartesian grid.

This method is intended to be used to produce data for visualisation, which
normally does not require double-precision values, so by default the data is
converted to `np.float32`. Pass `use_float32=False` to retain the original
precision.

Parameters
----------
nX : int (default 300)
Number of grid points in the X direction
nY : int (default 300)
Number of grid points in the Y direction
nZ : int (default 100)
Number of grid points in the Z direction
use_float32 : bool (default True)
Downgrade precision to `np.float32`?
fill_value : float (default np.nan)
Value to use for points outside the interpolation domain (passed to
`scipy.interpolate.RegularGridInterpolator`)

See Also
--------
BoutDataArray.interpolate_to_cartesian
"""
ds = self.data
ds = ds.bout.add_cartesian_coordinates()

if not isinstance(use_float32, bool):
raise ValueError(f"use_float32 must be a bool, got '{use_float32}'")
if use_float32:
float_type = np.float32
ds = ds.astype(float_type)
for coord in ds.coords:
# Coordinates are not converted by Dataset.astype, so convert explicitly
ds[coord] = ds[coord].astype(float_type)
fill_value = float_type(fill_value)
else:
# ds.data_vars is a mapping keyed by variable name, so look up the first
# variable's dtype by name
float_type = ds[list(ds.data_vars)[0]].dtype

tdim = ds.metadata["bout_tdim"]
zdim = ds.metadata["bout_zdim"]
if tdim in ds.dims:
nt = ds.sizes[tdim]
n_toroidal = ds.sizes[zdim]

# Create Cartesian grid to interpolate to
Xmin = ds["X_cartesian"].min()
Xmax = ds["X_cartesian"].max()
Ymin = ds["Y_cartesian"].min()
Ymax = ds["Y_cartesian"].max()
Zmin = ds["Z_cartesian"].min()
Zmax = ds["Z_cartesian"].max()
newX_1d = xr.DataArray(np.linspace(Xmin, Xmax, nX), dims="X")
newX = newX_1d.expand_dims({"Y": nY, "Z": nZ}, axis=[1, 2])
newY_1d = xr.DataArray(np.linspace(Ymin, Ymax, nY), dims="Y")
newY = newY_1d.expand_dims({"X": nX, "Z": nZ}, axis=[0, 2])
newZ_1d = xr.DataArray(np.linspace(Zmin, Zmax, nZ), dims="Z")
newZ = newZ_1d.expand_dims({"X": nX, "Y": nY}, axis=[0, 1])
newR = np.sqrt(newX**2 + newY**2)
newzeta = np.arctan2(newY, newX)
# Define newzeta in range 0->2*pi
newzeta = np.where(newzeta < 0.0, newzeta + 2.0 * np.pi, newzeta)
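# (np.arctan2 returns angles in [-pi, pi]; for example X=-1, Y=-1 gives
# arctan2(-1, -1) = -3*pi/4, which the np.where above maps to 5*pi/4,
# matching the 0 -> 2*pi convention of the toroidal angle.)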

from scipy.interpolate import (
RegularGridInterpolator,
griddata,
)

# Create Cylindrical coordinates for intermediate grid
Rcyl_min = float_type(ds["R"].min())
Rcyl_max = float_type(ds["R"].max())
Zcyl_min = float_type(ds["Z"].min())
Zcyl_max = float_type(ds["Z"].max())
n_Rcyl = int(round(nZ * (Rcyl_max - Rcyl_min) / (Zcyl_max - Zcyl_min)))
Rcyl = xr.DataArray(np.linspace(Rcyl_min, Rcyl_max, 2 * n_Rcyl), dims="r")
Zcyl = xr.DataArray(np.linspace(Zcyl_min, Zcyl_max, 2 * nZ), dims="z")
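# e.g. for a domain with Rcyl_max - Rcyl_min = 2.0, Zcyl_max - Zcyl_min = 4.0
# and nZ = 100: n_Rcyl = 50, so the intermediate grid is 100 x 200 points,
# giving roughly equal grid spacing in R and Z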

# Create Dataset for result
result = xr.Dataset()
result.attrs["metadata"] = ds.metadata

# Interpolate in two stages for efficiency. Unstructured 3d interpolation is
# very slow. Unstructured 2d interpolation onto Cartesian (R, Z) grids, followed
# by structured 3d interpolation onto the (X, Y, Z) grid, is much faster.
# Structured 3d interpolation straight from (psi, theta, zeta) to (X, Y, Z)
# leaves artifacts in the output, because theta does not vary continuously
# everywhere (has branch cuts).
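# Schematically (coordinate names as in the comment above):
#   stage 1: (psi, theta) -> (R, Z) on each poloidal plane (unstructured 2d)
#   stage 2: (R, Z, zeta) -> (X, Y, Z) (structured 3d, RegularGridInterpolator)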

zeta_out = np.zeros(n_toroidal + 1)
zeta_out[:-1] = ds[zdim].values
zeta_out[-1] = zeta_out[-2] + ds["dz"].mean()
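# e.g. for n_toroidal = 4 with a uniform zeta grid [0, pi/2, pi, 3*pi/2]
# (dz = pi/2), zeta_out becomes [0, pi/2, pi, 3*pi/2, 2*pi]; the appended
# endpoint lets the interpolation wrap smoothly across zeta = 0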

def interp_single_time(da):
print(" interpolate poloidal planes")

da_cyl = da.bout.interpolate_from_unstructured(R=Rcyl, Z=Zcyl).transpose(
"R", "Z", zdim, missing_dims="ignore"
)

if zdim not in da_cyl.dims:
da_cyl = da_cyl.expand_dims({zdim: n_toroidal + 1}, axis=-1)
else:
# Impose toroidal periodicity by appending zdim=0 to end of array
da_cyl = xr.concat((da_cyl, da_cyl.isel({zdim: 0})), zdim)

print(" build 3d interpolator")
interp = RegularGridInterpolator(
(Rcyl.values, Zcyl.values, zeta_out),
da_cyl.values,
bounds_error=False,
fill_value=fill_value,
)

print(" do 3d interpolation")
return interp(
(newR, newZ, newzeta),
method="linear",
)

for name, da in ds.data_vars.items():
print(f"\ninterpolating {name}")
# Order of dimensions does not really matter here - output only depends on
# the shape of newR, newZ, newzeta. Possibly more efficient to assign the 2d
# results in the loop to the last two dimensions, so put zeta first.
# Can't just use da.min().item() below (to get a scalar value instead of a
# zero-dimensional array) because .item() doesn't work for dask arrays (yet!).

datamin = float_type(da.min().values)
datamax = float_type(da.max().values)

if tdim in da.dims:
data_cartesian = np.zeros((nt, nX, nY, nZ), dtype=float_type)
for tind in range(nt):
print(f" tind={tind}")
data_cartesian[tind, :, :, :] = interp_single_time(
da.isel({tdim: tind})
)
result[name] = xr.DataArray(data_cartesian, dims=[tdim, "X", "Y", "Z"])
else:
data_cartesian = interp_single_time(da)
result[name] = xr.DataArray(data_cartesian, dims=["X", "Y", "Z"])

# Copy metadata to data variables, in case it is needed
result[name].attrs["metadata"] = ds.metadata

result = result.assign_coords(X=newX_1d, Y=newY_1d, Z=newZ_1d)

return result

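# Example usage (hypothetical names; the regular output grid is convenient for
# volume rendering, e.g. with pyvista or similar tools):
#
#     ds_cart = ds.bout.interpolate_to_cartesian(nX=200, nY=200, nZ=80)
#
# Each variable in ds_cart then has dims (t, X, Y, Z), float32 by default,
# with np.nan outside the plasma domain.
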
def add_cartesian_coordinates(self):
"""
Add Cartesian (X,Y,Z) coordinates.

Returns
-------
Dataset with new coordinates added, which are named 'X_cartesian',
'Y_cartesian', and 'Z_cartesian'
"""
return _add_cartesian_coordinates(self.data)

def remove_yboundaries(self, **kwargs):
"""
Remove y-boundary points, if present, from the Dataset
@@ -1006,7 +1175,13 @@ def to_restart(
Number of processors in the y-direction. If not given, keep the number used
for the original simulation
tind : int, default -1
Time-index of the slice to write to the restart files
Time-index of the slice to write to the restart files. Note: when creating
restart files from 'dump' files, it is recommended to open the Dataset over
the full time range and select the time point with this `tind` argument,
rather than slicing in time manually, so that `hist_hi` in the output can be
calculated correctly. `hist_hi` (the output step count at the end of the
simulation) is computed from its existing value, `tind`, and the total
number of time points in the current output data.
prefix : str, default "BOUT.restart"
Prefix to use for names of restart files
overwrite : bool, default False
@@ -1266,6 +1441,8 @@ def is_list(variable):
animate=False,
axis_coords=this_axis_coords,
aspect=this_aspect,
vmin=this_vmin,
vmax=this_vmax,
**this_kwargs,
)
)
@@ -1279,6 +1456,8 @@ def is_list(variable):
animate=False,
axis_coords=this_axis_coords,
aspect=this_aspect,
vmin=this_vmin,
vmax=this_vmax,
label=w.name,
**this_kwargs,
)
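
The `tind` note added to the `to_restart` docstring above corresponds to a
workflow like the following minimal sketch (file paths and processor counts
are illustrative assumptions, not part of this commit):

from xbout import open_boutdataset

# Open the full time range rather than slicing in time first, so that
# to_restart can compute hist_hi from its existing value, tind, and the
# length of the time dimension
ds = open_boutdataset("data/BOUT.dmp.*.nc")
ds.bout.to_restart(savepath="restarts", nxpe=4, nype=8, tind=-1, overwrite=True)
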
6 changes: 3 additions & 3 deletions xbout/calc/tests/test_turbulence.py
@@ -19,7 +19,7 @@ def test_1d(self):
dat = np.array([5, 7, 3.2, -1, -4.4])
orig = DataArray(dat, dims=["x"])

sum_squares = np.sum(dat ** 2)
sum_squares = np.sum(dat**2)
mean_squares = sum_squares / dat.size
rootmeansquare = np.sqrt(mean_squares)

@@ -31,7 +31,7 @@
def test_reduce_2d(self, dim, axis):
dat = np.array([[5, 7, 3.2, -1, -4.4], [-1, -2.5, 0, 8, 3.0]])
orig = DataArray(dat, dims=["x", "t"])
sum_squares = np.sum(dat ** 2, axis=axis)
sum_squares = np.sum(dat**2, axis=axis)
mean_squares = sum_squares / dat.shape[axis]
rootmeansquare = np.sqrt(mean_squares)

@@ -44,7 +44,7 @@ def test_reduce_2d_dask(self):
orig = DataArray(dat, dims=["x", "t"])
chunked = orig.chunk({"x": 1})
axis = 1
sum_squares = np.sum(dat ** 2, axis=axis)
sum_squares = np.sum(dat**2, axis=axis)
mean_squares = sum_squares / dat.shape[axis]
rootmeansquare = np.sqrt(mean_squares)

9 changes: 8 additions & 1 deletion xbout/geometries.py
@@ -181,7 +181,14 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None):
can_use_1d_z_coord = (nz == 1) or use_metric_3d

if can_use_1d_z_coord:
z = _1d_coord_from_spacing(updated_ds["dz"], zcoord, updated_ds)
if updated_ds.geometry == "fci":
# dz varies on an FCI grid, so just set z to a uniform linspace
z = xr.DataArray(
np.linspace(start=0, stop=2 * np.pi, num=nz, endpoint=False),
dims=zcoord,
)
else:
z = _1d_coord_from_spacing(updated_ds["dz"], zcoord, updated_ds)
else:
if bout_v5:
if not np.all(updated_ds["dz"].min() == updated_ds["dz"].max()):
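
In the "fci" branch added above, the z coordinate becomes a uniform grid on
[0, 2*pi) with the endpoint excluded. A standalone sketch of what that
evaluates to (the dimension name is illustrative):

import numpy as np
import xarray as xr

nz = 4
z = xr.DataArray(
    np.linspace(start=0, stop=2 * np.pi, num=nz, endpoint=False),
    dims="zeta",
)
print(z.values)  # [0. 1.5708 3.1416 4.7124] -> 0, pi/2, pi, 3*pi/2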