Clean up warnings in the test suite #11331

Merged 1 commit on Aug 22, 2022
4 changes: 2 additions & 2 deletions .github/azure-steps.yml
@@ -54,12 +54,12 @@ steps:
     condition: eq(${{ parameters.gpu }}, true)

   - script: |
-      ${{ parameters.prefix }} python -m pytest --pyargs spacy
+      ${{ parameters.prefix }} python -m pytest --pyargs spacy -W error
     displayName: "Run CPU tests"
     condition: eq(${{ parameters.gpu }}, false)

   - script: |
-      ${{ parameters.prefix }} python -m pytest --pyargs spacy -p spacy.tests.enable_gpu
+      ${{ parameters.prefix }} python -m pytest --pyargs spacy -W error -p spacy.tests.enable_gpu
     displayName: "Run GPU tests"
     condition: eq(${{ parameters.gpu }}, true)

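For context, pytest's -W option works like Python's own -W flag: it installs a warnings filter for the test run, so -W error promotes every warning raised inside a test to an exception and fails that test. A minimal sketch of the effect; the module and helper names below are hypothetical and not part of this PR:

# test_warnings_demo.py -- hypothetical example, not part of spaCy
import warnings


def deprecated_helper():
    warnings.warn("use new_helper() instead", DeprecationWarning)
    return 42


def test_deprecated_helper():
    # Passes under a plain `pytest` run (the warning is only reported in the
    # summary), but fails under `pytest -W error` because the warning is raised.
    assert deprecated_helper() == 42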
5 changes: 3 additions & 2 deletions spacy/tests/doc/test_doc_api.py
@@ -3,6 +3,7 @@
 import numpy
 from numpy.testing import assert_array_equal
 import pytest
+import warnings
 from thinc.api import NumpyOps, get_current_ops

 from spacy.attrs import DEP, ENT_IOB, ENT_TYPE, HEAD, IS_ALPHA, MORPH, POS
@@ -529,9 +530,9 @@ def test_doc_from_array_sent_starts(en_vocab):
     # no warning using default attrs
     attrs = doc._get_array_attrs()
     arr = doc.to_array(attrs)
-    with pytest.warns(None) as record:
+    with warnings.catch_warnings():
+        warnings.simplefilter("error")
         new_doc.from_array(attrs, arr)
-    assert len(record) == 0
     # only SENT_START uses SENT_START
     attrs = [SENT_START]
     arr = doc.to_array(attrs)
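
The pattern above asserts that a call emits no warnings: pytest.warns(None) is deprecated in recent pytest releases, and the stdlib replacement is to promote warnings to errors inside a catch_warnings block, which restores the previous filters on exit. A standalone sketch, with quiet_operation as a hypothetical stand-in for the call under test:

import warnings


def quiet_operation():
    # Hypothetical helper that is expected not to warn.
    return "ok"


def test_quiet_operation_is_silent():
    with warnings.catch_warnings():
        # Any warning raised in this block becomes an exception and fails
        # the test; the filter change is undone when the block exits.
        warnings.simplefilter("error")
        assert quiet_operation() == "ok"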
3 changes: 3 additions & 0 deletions spacy/tests/lang/ru/test_lemmatizer.py
@@ -2,6 +2,9 @@
 from spacy.tokens import Doc


+pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning")
+
+
 def test_ru_doc_lemmatization(ru_lemmatizer):
     words = ["мама", "мыла", "раму"]
     pos = ["NOUN", "VERB", "NOUN"]
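
A module-level pytestmark attaches the mark to every test collected from the file, and filters applied via marks take precedence over the -W error filter added to CI above, so these tests keep passing while third-party DeprecationWarnings (presumably from the pymorphy lemmatizer backend here) are ignored. A minimal sketch with a hypothetical noisy_lemmatize helper:

import warnings

import pytest

# Applies to every test in this module; mark filters outrank `-W error`.
pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning")


def noisy_lemmatize(word):
    # Hypothetical stand-in for a third-party call that still warns.
    warnings.warn("old API", DeprecationWarning)
    return word.lower()


def test_noisy_lemmatize_warning_is_ignored():
    # Passes even under `pytest -W error`, because the module-level mark
    # inserts an ignore filter ahead of the command-line error filter.
    assert noisy_lemmatize("Мама") == "мама"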
4 changes: 4 additions & 0 deletions spacy/tests/lang/uk/test_lemmatizer.py
@@ -1,6 +1,10 @@
 import pytest
 from spacy.tokens import Doc

+
+pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning")
+
+
 def test_uk_lemmatizer(uk_lemmatizer):
     """Check that the default uk lemmatizer runs."""
     doc = Doc(uk_lemmatizer.vocab, words=["a", "b", "c"])
9 changes: 5 additions & 4 deletions spacy/tests/matcher/test_phrase_matcher.py
@@ -1,4 +1,5 @@
 import pytest
+import warnings
 import srsly
 from mock import Mock

@@ -344,13 +345,13 @@ def test_phrase_matcher_validation(en_vocab):
matcher.add("TEST1", [doc1])
with pytest.warns(UserWarning):
matcher.add("TEST2", [doc2])
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
matcher.add("TEST3", [doc3])
assert not record.list
matcher = PhraseMatcher(en_vocab, attr="POS", validate=True)
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
matcher.add("TEST4", [doc2])
assert not record.list


def test_attr_validation(en_vocab):
4 changes: 4 additions & 0 deletions spacy/tests/pipeline/test_entity_linker.py
@@ -1048,6 +1048,10 @@ def test_no_gold_ents(patterns):
     for eg in train_examples:
         eg.predicted = ruler(eg.predicted)

+    # Entity ruler is no longer needed (initialization below wipes out the
+    # patterns and causes warnings)
+    nlp.remove_pipe("entity_ruler")
+
     def create_kb(vocab):
         # create artificial KB
         mykb = KnowledgeBase(vocab, entity_vector_length=vector_length)
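
For reference, removing a pipeline component is a single spaCy call; a standalone sketch under stated assumptions (this is not the PR's test, which removes the ruler from the fixture pipeline built earlier in the function):

import spacy

nlp = spacy.blank("en")
ruler = nlp.add_pipe("entity_ruler")
ruler.add_patterns([{"label": "ORG", "pattern": "spaCy"}])

# Once the ruler has done its job (pre-annotating examples above),
# dropping it keeps a later initialization step from resetting it
# and emitting warnings.
nlp.remove_pipe("entity_ruler")
assert "entity_ruler" not in nlp.pipe_names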
2 changes: 2 additions & 0 deletions spacy/training/initialize.py
@@ -337,3 +337,5 @@ def ensure_shape(vectors_loc):
         # store all the results in a list in memory
         lines2 = open_file(vectors_loc)
         yield from lines2
+        lines2.close()
+    lines.close()
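
The two close() calls run only when the generator is fully exhausted; if a consumer stopped iterating early, the file objects would still be finalized with an "unclosed file" ResourceWarning, which -W error would presumably turn into a failure. A more defensive sketch owns the handle in a with block (read_lines here is a hypothetical stand-in for open_file, not the PR's code):

def read_lines(path):
    # Holding the handle in a `with` block closes it even if the consumer
    # stops iterating early, so no ResourceWarning is emitted when the
    # generator is garbage-collected.
    with open(path, encoding="utf8") as file_:
        yield from file_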