From 6dd06f9f25cf65429d3cf1963f91444b2815ced7 Mon Sep 17 00:00:00 2001
From: opentaco
Date: Tue, 15 Nov 2022 17:42:35 +0200
Subject: [PATCH] Comment update for tensor size

---
 bittensor/utils/tokenizer_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bittensor/utils/tokenizer_utils.py b/bittensor/utils/tokenizer_utils.py
index 304ec0a7f6..471d4f822c 100644
--- a/bittensor/utils/tokenizer_utils.py
+++ b/bittensor/utils/tokenizer_utils.py
@@ -875,7 +875,7 @@ def unravel_topk_token_phrases(compact_topk: torch.Tensor, topk: int, ignore_ind
         f'{batch_size} * ({topk} + 1) != {len(prob_idx)}' # decoding irregularity otherwise
 
     probs = torch.clamp(compact_topk[prob_idx], 0, 1)  # [batch_size * (topk + 1)] ensure probabilities within [0, 1]
-    probs_sum = probs.reshape(batch_size, topk + 1).sum(dim=1)  # [batch_size, (topk + 1)]
+    probs_sum = probs.reshape(batch_size, topk + 1).sum(dim=1)  # [batch_size]
     assert torch.all((-atol < probs_sum) & (probs_sum < 1 + atol)), f'unravel_topk_token_phrases(): probs_sum not in [0, 1]'
 
     # Obtain phrase lengths and maximum phrase length
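
For reference, a minimal sketch (not part of the patch) showing why the updated shape comment is correct: summing a [batch_size, topk + 1] tensor over dim=1 collapses that axis, so probs_sum is 1-D with shape [batch_size], not [batch_size, (topk + 1)]. The batch_size and topk values below are illustrative, not taken from the patched code.

    import torch

    batch_size, topk = 4, 8                             # hypothetical sizes for illustration
    compact = torch.rand(batch_size * (topk + 1))       # flat stand-in for compact_topk[prob_idx]
    probs = torch.clamp(compact, 0, 1)                  # [batch_size * (topk + 1)]
    probs_sum = probs.reshape(batch_size, topk + 1).sum(dim=1)

    print(probs.shape)      # torch.Size([36])  -> batch_size * (topk + 1)
    print(probs_sum.shape)  # torch.Size([4])   -> [batch_size], matching the updated comment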