ENH: add dropna keyword to Unique
martinfleis committed Jul 19, 2021
1 parent 9d0096a commit dfb04b7
Showing 2 changed files with 13 additions and 2 deletions.
8 changes: 6 additions & 2 deletions momepy/diversity.py
@@ -728,6 +728,8 @@ class Unique:
         spatial weights matrix
     unique_id : str
         name of the column with unique id used as ``spatial_weights`` index
+    dropna : bool (default True)
+        Don’t include NaN in the counts of unique values.
     verbose : bool (default True)
         if True, shows progress bars in loops and indication of steps
@@ -754,7 +756,9 @@ class Unique:
     100%|██████████| 144/144 [00:00<00:00, 722.50it/s]
     """

-    def __init__(self, gdf, values, spatial_weights, unique_id, verbose=True):
+    def __init__(
+        self, gdf, values, spatial_weights, unique_id, dropna=True, verbose=True
+    ):
         self.gdf = gdf
         self.sw = spatial_weights
         self.id = gdf[unique_id]
@@ -775,7 +779,7 @@ def __init__(self, gdf, values, spatial_weights, unique_id, verbose=True):
                 neighbours += spatial_weights.neighbors[index]

                 values_list = data.loc[neighbours]
-                results_list.append(len(values_list.unique()))
+                results_list.append(values_list.nunique(dropna=dropna))
             else:
                 results_list.append(np.nan)

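For context, the change delegates the counting to pandas.Series.nunique, whose dropna flag either ignores NaN (the default) or counts it as one additional unique value. A minimal sketch of that behaviour, independent of momepy:

import numpy as np
import pandas as pd

s = pd.Series(["a", "b", "b", np.nan])
print(s.nunique())              # 2 -- NaN is excluded (dropna=True, the default)
print(s.nunique(dropna=False))  # 3 -- NaN counts as one more unique value
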
7 changes: 7 additions & 0 deletions tests/test_diversity.py
@@ -169,6 +169,13 @@ def test_Unique(self):
         assert un.isna().any()
         assert un[5] == 8

+        self.df_tessellation.loc[0, "cat"] = np.nan
+        un = mm.Unique(self.df_tessellation, "cat", self.sw, "uID", dropna=False).series
+        assert un[0] == 9
+
+        un = mm.Unique(self.df_tessellation, "cat", self.sw, "uID", dropna=True).series
+        assert un[0] == 8
+
     def test_Percentile(self):
         perc = mm.Percentiles(self.df_tessellation, "area", self.sw, "uID").frame
         assert np.all(
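A hedged end-to-end sketch of how the new keyword could be used, following the pattern of momepy's documented examples; the bubenec sample dataset, the datasets.get_path helper, and the sw_high weights builder are assumed to be available, and the "cat" column is hypothetical:

import geopandas as gpd
import momepy as mm
import numpy as np

# sample tessellation layer shipped with momepy (path helper assumed, as in the docs)
tess = gpd.read_file(mm.datasets.get_path("bubenec"), layer="tessellation")
tess["cat"] = np.random.randint(0, 10, len(tess)).astype(float)  # hypothetical categorical column
tess.loc[0, "cat"] = np.nan                                      # introduce a missing value
sw = mm.sw_high(k=3, gdf=tess, ids="uID")                        # higher-order spatial weights

tess["cat_unique"] = mm.Unique(tess, "cat", sw, "uID").series                    # NaN ignored (default)
tess["cat_unique_nan"] = mm.Unique(tess, "cat", sw, "uID", dropna=False).series  # NaN counted as a category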
