# ai-content-maker/.venv/Lib/site-packages/spacy/lang/tl/tokenizer_exceptions.py

from ...symbols import NORM, ORTH
from ...util import update_exc
from ..tokenizer_exceptions import BASE_EXCEPTIONS
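
# Each entry maps a contracted Tagalog surface form to its component tokens:
# ORTH preserves the written text of each piece and NORM records the expanded
# word (e.g. "'y" -> "ay", "'yo" -> "iyo", "'di" -> "hindi").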
_exc = {
    "tayo'y": [{ORTH: "tayo"}, {ORTH: "'y", NORM: "ay"}],
    "isa'y": [{ORTH: "isa"}, {ORTH: "'y", NORM: "ay"}],
    "baya'y": [{ORTH: "baya"}, {ORTH: "'y", NORM: "ay"}],
    "sa'yo": [{ORTH: "sa"}, {ORTH: "'yo", NORM: "iyo"}],
    "ano'ng": [{ORTH: "ano"}, {ORTH: "'ng", NORM: "ang"}],
    "siya'y": [{ORTH: "siya"}, {ORTH: "'y", NORM: "ay"}],
    "nawa'y": [{ORTH: "nawa"}, {ORTH: "'y", NORM: "ay"}],
    "papa'no": [{ORTH: "papa'no", NORM: "papaano"}],
    "'di": [{ORTH: "'di", NORM: "hindi"}],
}

TOKENIZER_EXCEPTIONS = update_exc(BASE_EXCEPTIONS, _exc)
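

# --- Usage sketch (not part of the upstream spaCy module) -------------------
# A minimal illustration, assuming spaCy is installed: a blank Tagalog
# pipeline picks up TOKENIZER_EXCEPTIONS, so contracted forms are split into
# separate tokens and their NORM values expose the expanded words.
if __name__ == "__main__":
    import spacy

    nlp = spacy.blank("tl")
    doc = nlp("sa'yo")
    # Expected output: [('sa', 'sa'), ("'yo", 'iyo')]
    print([(token.text, token.norm_) for token in doc])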