1 parent b7a4e39 commit dbe7a7c
api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenzier.py
@@ -1,6 +1,9 @@
+import logging
 from threading import Lock
 from typing import Any
 
+logger = logging.getLogger(__name__)
+
 _tokenizer: Any = None
 _lock = Lock()
 
@@ -43,5 +46,6 @@ def get_encoder() -> Any:
     base_path = abspath(__file__)
     gpt2_tokenizer_path = join(dirname(base_path), "gpt2")
     _tokenizer = TransformerGPT2Tokenizer.from_pretrained(gpt2_tokenizer_path)
+    logger.info("Fallback to Transformers' GPT-2 tokenizer from tiktoken")
 
     return _tokenizer
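
For context, here is a minimal sketch of how the module-level logger and the fallback path shown in this diff might fit together. The tiktoken-first attempt and the call to tiktoken.get_encoding("gpt2") are assumptions inferred from the log message; only the Transformers branch and the new logger.info call are actually part of this commit.

# Sketch only: the tiktoken attempt below is assumed, not shown in the diff.
import logging
from os.path import abspath, dirname, join
from threading import Lock
from typing import Any

logger = logging.getLogger(__name__)

_tokenizer: Any = None
_lock = Lock()


def get_encoder() -> Any:
    global _tokenizer
    with _lock:
        if _tokenizer is None:
            try:
                # Assumed fast path: use tiktoken's GPT-2 encoding when available.
                import tiktoken

                _tokenizer = tiktoken.get_encoding("gpt2")
            except Exception:
                # Fall back to the bundled Transformers GPT-2 tokenizer files.
                from transformers import GPT2Tokenizer as TransformerGPT2Tokenizer

                base_path = abspath(__file__)
                gpt2_tokenizer_path = join(dirname(base_path), "gpt2")
                _tokenizer = TransformerGPT2Tokenizer.from_pretrained(gpt2_tokenizer_path)
                logger.info("Fallback to Transformers' GPT-2 tokenizer from tiktoken")
        return _tokenizer

Logging the fallback at INFO level makes it visible in normal operation which tokenizer implementation ended up being used, without treating the slower Transformers path as an error.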