diff --git a/gensim/models/word2vec.py b/gensim/models/word2vec.py index 8c371ea8bd..182e1c4dc1 100644 --- a/gensim/models/word2vec.py +++ b/gensim/models/word2vec.py @@ -474,7 +474,7 @@ def __init__( raise TypeError("You can't pass a generator as the sentences argument. Try an iterator.") self.build_vocab(sentences, trim_rule=trim_rule) self.train(sentences) - + else : if trim_rule is not None : logger.warning("The rule, if given, is only used to prune vocabulary during build_vocab() and is not stored as part of the model. ") @@ -995,7 +995,7 @@ def score(self, sentences, total_sentences=int(1e6), chunksize=100, queue_factor run word2vec with hs=1 and negative=0 for this to work.") def worker_loop(): - """Train the model, lifting lists of sentences from the jobs queue.""" + """Compute log probability for each sentence, lifting lists of sentences from the jobs queue.""" work = zeros(1, dtype=REAL) # for sg hs, we actually only need one memory loc (running sum) neu1 = matutils.zeros_aligned(self.layer1_size, dtype=REAL) while True: