diff --git a/bindings/python/py_src/tokenizers/pre_tokenizers/__init__.pyi b/bindings/python/py_src/tokenizers/pre_tokenizers/__init__.pyi
index 6f31ff3a2..bbaa3fce4 100644
--- a/bindings/python/py_src/tokenizers/pre_tokenizers/__init__.pyi
+++ b/bindings/python/py_src/tokenizers/pre_tokenizers/__init__.pyi
@@ -522,7 +522,7 @@ class UnicodeScripts(PreTokenizer):
         pass
 
 class Whitespace(PreTokenizer):
-    """
+    r"""
     This pre-tokenizer simply splits using the following regex: `\w+|[^\w\s]+`
     """
     def __init__(self):
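
For reference, the docstring touched here describes the `Whitespace` pre-tokenizer's behavior: the pattern `\w+|[^\w\s]+` matches either a run of word characters or a run of characters that are neither word characters nor whitespace. The `r"""` prefix keeps `\w` and `\s` from being interpreted as string escape sequences, which recent Python versions flag as invalid escapes; that is presumably the motivation for the change. The sketch below reproduces the documented split with the standard-library `re` module; the commented `tokenizers` call assumes the `pre_tokenize_str` helper exposed by the Python bindings and is illustrative only.

```python
import re

# The pattern documented for the Whitespace pre-tokenizer: a run of word
# characters, or a run of characters that are neither word nor whitespace.
PATTERN = re.compile(r"\w+|[^\w\s]+")

text = "Hello, world! It's a test."
print(PATTERN.findall(text))
# ['Hello', ',', 'world', '!', 'It', "'", 's', 'a', 'test', '.']

# Roughly equivalent call through the library itself (assumes the
# `pre_tokenize_str` method declared in these stubs):
# from tokenizers.pre_tokenizers import Whitespace
# Whitespace().pre_tokenize_str(text)
# -> [('Hello', (0, 5)), (',', (5, 6)), ('world', (7, 12)), ...]
```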