ai-content-maker/.venv/Lib/site-packages/spacy/tests/lang/hsb/test_tokenizer.py

import pytest
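
# Each test case pairs raw Upper Sorbian text with the token sequence the
# tokenizer is expected to produce. The example sentence translates roughly as
# "Upper Sorbian consists, or consisted, of several dialects, which in part
# differed considerably from one another." Note that the abbreviation "resp."
# should survive as a single token rather than being split at its period.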
HSB_BASIC_TOKENIZATION_TESTS = [
    (
        "Hornjoserbšćina wobsteji resp. wobsteješe z wjacorych dialektow, kotrež so zdźěla chětro wot so rozeznawachu.",
        [
            "Hornjoserbšćina",
            "wobsteji",
            "resp.",
            "wobsteješe",
            "z",
            "wjacorych",
            "dialektow",
            ",",
            "kotrež",
            "so",
            "zdźěla",
            "chětro",
            "wot",
            "so",
            "rozeznawachu",
            ".",
        ],
    ),
]
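

# `hsb_tokenizer` is a pytest fixture supplied by the test suite's shared
# conftest.py; in spaCy's tests it is presumably built from the Upper Sorbian
# language class, roughly get_lang_class("hsb")().tokenizer (the exact
# construction is an assumption here, not shown in this file).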
@pytest.mark.parametrize("text,expected_tokens", HSB_BASIC_TOKENIZATION_TESTS)
def test_hsb_tokenizer_basic(hsb_tokenizer, text, expected_tokens):
    tokens = hsb_tokenizer(text)
    # Compare surface forms only, skipping any whitespace tokens.
    token_list = [token.text for token in tokens if not token.is_space]
    assert expected_tokens == token_list