Skip to content

Commit

Permalink
Fix fastembed reloading issue. (#4117)
Browse files Browse the repository at this point in the history
### What problem does this PR solve?

`FastEmbed` cached a single class-level `TextEmbedding` instance and kept reusing it, so requesting a different `model_name` after the first initialization never reloaded the model. This change tracks the loaded model's name (`_model_name`), guards (re)initialization with a class-level lock, reloads when the requested name differs, and falls back to `snapshot_download` of `BAAI/bge-small-en-v1.5` if loading fails — mirroring the same `_model_name` bookkeeping added to `DefaultEmbedding`.
### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
  • Loading branch information
KevinHuSh authored Dec 19, 2024
1 parent 8939206 commit 7474348
Showing 1 changed file with 19 additions and 2 deletions.
21 changes: 19 additions & 2 deletions rag/llm/embedding_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ def encode_queries(self, text: str):

class DefaultEmbedding(Base):
_model = None
_model_name = ""
_model_lock = threading.Lock()
def __init__(self, key, model_name, **kwargs):
"""
Expand All @@ -69,6 +70,7 @@ def __init__(self, key, model_name, **kwargs):
DefaultEmbedding._model = FlagModel(os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z0-9]+/", "", model_name)),
query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
use_fp16=torch.cuda.is_available())
DefaultEmbedding._model_name = model_name
except Exception:
model_dir = snapshot_download(repo_id="BAAI/bge-large-zh-v1.5",
local_dir=os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z0-9]+/", "", model_name)),
Expand All @@ -77,6 +79,7 @@ def __init__(self, key, model_name, **kwargs):
query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
use_fp16=torch.cuda.is_available())
self._model = DefaultEmbedding._model
self._model_name = DefaultEmbedding._model_name

def encode(self, texts: list):
batch_size = 16
Expand Down Expand Up @@ -250,6 +253,8 @@ def encode_queries(self, text):

class FastEmbed(Base):
_model = None
_model_name = ""
_model_lock = threading.Lock()

def __init__(
self,
Expand All @@ -260,8 +265,20 @@ def __init__(
**kwargs,
):
if not settings.LIGHTEN and not FastEmbed._model:
from fastembed import TextEmbedding
self._model = TextEmbedding(model_name, cache_dir, threads, **kwargs)
with FastEmbed._model_lock:
from fastembed import TextEmbedding
if not FastEmbed._model or model_name != FastEmbed._model_name:
try:
FastEmbed._model = TextEmbedding(model_name, cache_dir, threads, **kwargs)
FastEmbed._model_name = model_name
except Exception:
cache_dir = snapshot_download(repo_id="BAAI/bge-small-en-v1.5",
local_dir=os.path.join(get_home_cache_dir(),
re.sub(r"^[a-zA-Z0-9]+/", "", model_name)),
local_dir_use_symlinks=False)
FastEmbed._model = TextEmbedding(model_name, cache_dir, threads, **kwargs)
self._model = FastEmbed._model
self._model_name = model_name

def encode(self, texts: list):
# Using the internal tokenizer to encode the texts and get the total
Expand Down

0 comments on commit 7474348

Please sign in to comment.