Skip to content
forked from pydata/xarray

Commit

Permalink
upgrade black version to 19.10b0 (pydata#3456)
Browse files Browse the repository at this point in the history
  • Loading branch information
max-sixty authored Oct 29, 2019
1 parent cb5eef1 commit 278d2e6
Show file tree
Hide file tree
Showing 15 changed files with 32 additions and 32 deletions.
2 changes: 1 addition & 1 deletion xarray/backends/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -677,7 +677,7 @@ def open_dataarray(
"then select the variable you want."
)
else:
data_array, = dataset.data_vars.values()
(data_array,) = dataset.data_vars.values()

data_array._file_obj = dataset._file_obj

Expand Down
2 changes: 1 addition & 1 deletion xarray/core/alignment.py
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ def align(

if not indexes and len(objects) == 1:
# fast path for the trivial case
obj, = objects
(obj,) = objects
return (obj.copy(deep=copy),)

all_indexes = defaultdict(list)
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/combine.py
Original file line number Diff line number Diff line change
Expand Up @@ -954,7 +954,7 @@ def _auto_concat(
"supply the ``concat_dim`` argument "
"explicitly"
)
dim, = concat_dims
(dim,) = concat_dims
return concat(
datasets,
dim=dim,
Expand Down
8 changes: 4 additions & 4 deletions xarray/core/computation.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def result_name(objects: list) -> Any:
names = {getattr(obj, "name", _DEFAULT_NAME) for obj in objects}
names.discard(_DEFAULT_NAME)
if len(names) == 1:
name, = names
(name,) = names
else:
name = None
return name
Expand Down Expand Up @@ -187,7 +187,7 @@ def build_output_coords(

if len(coords_list) == 1 and not exclude_dims:
# we can skip the expensive merge
unpacked_coords, = coords_list
(unpacked_coords,) = coords_list
merged_vars = dict(unpacked_coords.variables)
else:
# TODO: save these merged indexes, instead of re-computing them later
Expand Down Expand Up @@ -237,7 +237,7 @@ def apply_dataarray_vfunc(
for variable, coords in zip(result_var, result_coords)
)
else:
coords, = result_coords
(coords,) = result_coords
out = DataArray(result_var, coords, name=name, fastpath=True)

return out
Expand Down Expand Up @@ -384,7 +384,7 @@ def apply_dataset_vfunc(
if signature.num_outputs > 1:
out = tuple(_fast_dataset(*args) for args in zip(result_vars, list_of_coords))
else:
coord_vars, = list_of_coords
(coord_vars,) = list_of_coords
out = _fast_dataset(result_vars, coord_vars)

if keep_attrs and isinstance(first_obj, Dataset):
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/concat.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,10 +148,10 @@ def _calc_concat_dim_coord(dim):
dim = dim_name
elif not isinstance(dim, DataArray):
coord = as_variable(dim).to_index_variable()
dim, = coord.dims
(dim,) = coord.dims
else:
coord = dim
dim, = coord.dims
(dim,) = coord.dims
return dim, coord


Expand Down
2 changes: 1 addition & 1 deletion xarray/core/dataarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -616,7 +616,7 @@ def _level_coords(self) -> Dict[Hashable, Hashable]:
if var.ndim == 1 and isinstance(var, IndexVariable):
level_names = var.level_names
if level_names is not None:
dim, = var.dims
(dim,) = var.dims
level_coords.update({lname: dim for lname in level_names})
return level_coords

Expand Down
2 changes: 1 addition & 1 deletion xarray/core/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -4066,7 +4066,7 @@ def reduce(
if len(reduce_dims) == 1:
# unpack dimensions for the benefit of functions
# like np.argmin which can't handle tuple arguments
reduce_dims, = reduce_dims
(reduce_dims,) = reduce_dims
elif len(reduce_dims) == var.ndim:
# prefer to aggregate over axis=None rather than
# axis=(0, 1) if they will be equivalent, because
Expand Down
6 changes: 3 additions & 3 deletions xarray/core/groupby.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,7 @@ def __init__(
raise ValueError("`group` must have a name")

group, obj, stacked_dim, inserted_dims = _ensure_1d(group, obj)
group_dim, = group.dims
(group_dim,) = group.dims

expected_size = obj.sizes[group_dim]
if group.size != expected_size:
Expand Down Expand Up @@ -470,7 +470,7 @@ def _infer_concat_args(self, applied_example):
else:
coord = self._unique_coord
positions = None
dim, = coord.dims
(dim,) = coord.dims
if isinstance(coord, _DummyGroup):
coord = None
return coord, dim, positions
Expand Down Expand Up @@ -644,7 +644,7 @@ def _concat_shortcut(self, applied, dim, positions=None):
def _restore_dim_order(self, stacked):
def lookup_order(dimension):
if dimension == self._group.name:
dimension, = self._group.dims
(dimension,) = self._group.dims
if dimension in self._obj.dims:
axis = self._obj.get_axis_num(dimension)
else:
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/indexing.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ def get_dim_indexers(data_obj, indexers):
level_indexers = defaultdict(dict)
dim_indexers = {}
for key, label in indexers.items():
dim, = data_obj[key].dims
(dim,) = data_obj[key].dims
if key != dim:
# assume here multi-index level indexer
level_indexers[dim][key] = label
Expand Down Expand Up @@ -1368,7 +1368,7 @@ def __getitem__(
if isinstance(key, tuple) and len(key) == 1:
# unpack key so it can index a pandas.Index object (pandas.Index
# objects don't like tuples)
key, = key
(key,) = key

if getattr(key, "ndim", 0) > 1: # Return np-array if multidimensional
return NumpyIndexingAdapter(self.array.values)[indexer]
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ def append_all(variables, indexes):


def collect_from_coordinates(
list_of_coords: "List[Coordinates]"
list_of_coords: "List[Coordinates]",
) -> Dict[Hashable, List[MergeElement]]:
"""Collect variables and indexes to be merged from Coordinate objects."""
grouped: Dict[Hashable, List[Tuple[Variable, pd.Index]]] = {}
Expand Down Expand Up @@ -320,7 +320,7 @@ def merge_coordinates_without_align(


def determine_coords(
list_of_mappings: Iterable["DatasetLike"]
list_of_mappings: Iterable["DatasetLike"],
) -> Tuple[Set[Hashable], Set[Hashable]]:
"""Given a list of dicts with xarray object values, identify coordinates.
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/variable.py
Original file line number Diff line number Diff line change
Expand Up @@ -1526,7 +1526,7 @@ def concat(cls, variables, dim="concat_dim", positions=None, shortcut=False):
along the given dimension.
"""
if not isinstance(dim, str):
dim, = dim.dims
(dim,) = dim.dims

# can't do this lazily: we need to loop through variables at least
# twice
Expand Down Expand Up @@ -1996,7 +1996,7 @@ def concat(cls, variables, dim="concat_dim", positions=None, shortcut=False):
arrays, if possible.
"""
if not isinstance(dim, str):
dim, = dim.dims
(dim,) = dim.dims

variables = list(variables)
first_var = variables[0]
Expand Down
8 changes: 4 additions & 4 deletions xarray/plot/plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,8 +83,8 @@ def _infer_line_data(darray, x, y, hue):
)

else:
xdim, = darray[xname].dims
huedim, = darray[huename].dims
(xdim,) = darray[xname].dims
(huedim,) = darray[huename].dims
yplt = darray.transpose(xdim, huedim)

else:
Expand All @@ -102,8 +102,8 @@ def _infer_line_data(darray, x, y, hue):
)

else:
ydim, = darray[yname].dims
huedim, = darray[huename].dims
(ydim,) = darray[yname].dims
(huedim,) = darray[huename].dims
xplt = darray.transpose(ydim, huedim)

huelabel = label_from_attrs(darray[huename])
Expand Down
2 changes: 1 addition & 1 deletion xarray/tests/test_cftime_offsets.py
Original file line number Diff line number Diff line change
Expand Up @@ -1187,5 +1187,5 @@ def test_dayofyear_after_cftime_range(freq):
def test_cftime_range_standard_calendar_refers_to_gregorian():
from cftime import DatetimeGregorian

result, = cftime_range("2000", periods=1)
(result,) = cftime_range("2000", periods=1)
assert isinstance(result, DatetimeGregorian)
8 changes: 4 additions & 4 deletions xarray/tests/test_dataarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -3125,11 +3125,11 @@ def test_align_copy(self):

# Trivial align - 1 element
x = DataArray([1, 2, 3], coords=[("a", [1, 2, 3])])
x2, = align(x, copy=False)
(x2,) = align(x, copy=False)
assert_identical(x, x2)
assert source_ndarray(x2.data) is source_ndarray(x.data)

x2, = align(x, copy=True)
(x2,) = align(x, copy=True)
assert_identical(x, x2)
assert source_ndarray(x2.data) is not source_ndarray(x.data)

Expand Down Expand Up @@ -3214,7 +3214,7 @@ def test_align_indexes(self):
assert_identical(expected_x2, x2)
assert_identical(expected_y2, y2)

x2, = align(x, join="outer", indexes={"a": [-2, 7, 10, -1]})
(x2,) = align(x, join="outer", indexes={"a": [-2, 7, 10, -1]})
expected_x2 = DataArray([3, np.nan, 2, 1], coords=[("a", [-2, 7, 10, -1])])
assert_identical(expected_x2, x2)

Expand Down Expand Up @@ -3293,7 +3293,7 @@ def test_broadcast_arrays_nocopy(self):
assert source_ndarray(x2.data) is source_ndarray(x.data)

# single-element broadcast (trivial case)
x2, = broadcast(x)
(x2,) = broadcast(x)
assert_identical(x, x2)
assert source_ndarray(x2.data) is source_ndarray(x.data)

Expand Down
6 changes: 3 additions & 3 deletions xarray/tests/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -1945,7 +1945,7 @@ def test_align_nocopy(self):

def test_align_indexes(self):
x = Dataset({"foo": DataArray([1, 2, 3], dims="x", coords=[("x", [1, 2, 3])])})
x2, = align(x, indexes={"x": [2, 3, 1]})
(x2,) = align(x, indexes={"x": [2, 3, 1]})
expected_x2 = Dataset(
{"foo": DataArray([2, 3, 1], dims="x", coords={"x": [2, 3, 1]})}
)
Expand Down Expand Up @@ -1973,7 +1973,7 @@ def test_broadcast(self):
},
{"c": ("x", [4])},
)
actual, = broadcast(ds)
(actual,) = broadcast(ds)
assert_identical(expected, actual)

ds_x = Dataset({"foo": ("x", [1])})
Expand All @@ -1995,7 +1995,7 @@ def test_broadcast_nocopy(self):
x = Dataset({"foo": (("x", "y"), [[1, 1]])})
y = Dataset({"bar": ("y", [2, 3])})

actual_x, = broadcast(x)
(actual_x,) = broadcast(x)
assert_identical(x, actual_x)
assert source_ndarray(actual_x["foo"].data) is source_ndarray(x["foo"].data)

Expand Down

0 comments on commit 278d2e6

Please sign in to comment.