Fix partial_fit_items bug when growing by only a single item (#563)
When updating with exactly one new item, partial_fit_items would
throw an exception because of an off-by-one error in allocating
factor storage. Fix the comparison and add a unit test that catches this.

Closes #556
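
To illustrate the off-by-one (a minimal NumPy sketch, not the library code; shapes and names mirror the diff below): with items rows, valid indices run 0 through items - 1, so an incoming id equal to items already needs one extra row, which the old > check skipped.

    import numpy as np

    # shapes mirror the diff: one row of factors per known item
    items, factors = 100, 32
    item_factors = np.zeros((items, factors), dtype=np.float32)

    max_itemid = 100  # exactly one new item; valid rows are 0..99

    # old check: 100 > 100 is False, so no row is allocated and a
    # later item_factors[max_itemid] raises IndexError
    # fixed check: 100 >= 100 is True, so one zero row is appended
    if max_itemid >= items:
        item_factors = np.concatenate(
            [item_factors, np.zeros((max_itemid - items + 1, factors), dtype=np.float32)]
        )

    assert item_factors.shape == (101, 32)  # row 100 is now addressable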
benfred authored Apr 12, 2022
1 parent d15958e commit 6491663
Showing 3 changed files with 8 additions and 2 deletions.
2 changes: 1 addition & 1 deletion implicit/cpu/als.py
@@ -315,7 +315,7 @@ def partial_fit_items(self, itemids, item_users):
         # ensure that we have enough storage for any new items
         items, factors = self.item_factors.shape
         max_itemid = max(itemids)
-        if max_itemid > items:
+        if max_itemid >= items:
             self.item_factors = np.concatenate(
                 [self.item_factors, np.zeros((max_itemid - items + 1, factors), dtype=self.dtype)]
             )
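
Note on the arithmetic: with the corrected >= check, max_itemid - items + 1 zero rows are appended, so growing by exactly one item (max_itemid == items) allocates the single row it needs instead of skipping allocation entirely.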
2 changes: 1 addition & 1 deletion implicit/gpu/als.py
@@ -242,7 +242,7 @@ def partial_fit_items(self, itemids, item_users):
         # ensure that we have enough storage for any new items
         items, factors = self.item_factors.shape
         max_itemid = max(itemids)
-        if max_itemid > items:
+        if max_itemid >= items:
             # TODO: grow exponentially ?
             self.item_factors.resize(max_itemid + 1, factors)
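
The TODO above asks about growing exponentially. For illustration only, a hypothetical sketch of what geometric growth could look like (not part of this commit; it assumes resize(rows, cols) preserves existing rows, as the line above implies):

        # hypothetical: double capacity to amortize repeated single-item grows
        new_rows = max(max_itemid + 1, 2 * items)
        self.item_factors.resize(new_rows, factors)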
6 changes: 6 additions & 0 deletions tests/als_test.py
@@ -255,3 +255,9 @@ def test_incremental_retrain(use_gpu):
     model.partial_fit_items([100], likes[1])
     ids, _ = model.recommend(1, likes[1], N=2)
     assert set(ids) == {1, 100}
+
+    # check to make sure we can index only a single extra item/user
+    model.partial_fit_users([101], likes[1])
+    model.partial_fit_items([101], likes[1])
+    ids, _ = model.recommend(101, likes[1], N=3)
+    assert set(ids) == {1, 100, 101}
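
To exercise this regression directly (assuming the repository's standard pytest setup):

    pytest tests/als_test.py -k test_incremental_retrain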
