ai-content-maker/.venv/Lib/site-packages/spacy/tests/lang/th/test_tokenizer.py

10 lines
318 B
Python
Raw Normal View History

2024-05-03 04:18:51 +03:00
import pytest
@pytest.mark.parametrize(
    "text,expected_tokens", [("คุณรักผมไหม", ["คุณ", "รัก", "ผม", "ไหม"])]
)
def test_th_tokenizer(th_tokenizer, text, expected_tokens):
    """Ensure the Thai tokenizer splits a sample sentence into the expected word tokens."""
    observed = [token.text for token in th_tokenizer(text)]
    assert observed == expected_tokens