Commit 8e546d2
Update Mistral and TogetherAI tests [release]
AjayP13 committed Aug 1, 2024
1 parent 89083f1 commit 8e546d2
Showing 4 changed files with 25 additions and 9 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "DataDreamer"
-version = "0.37.0"
+version = "0.38.0"
 description = "Prompt. Generate Synthetic Data. Train & Align Models."
 license = "MIT"
 authors= [
22 changes: 19 additions & 3 deletions src/llms/together.py
@@ -90,15 +90,31 @@ def retry_wrapper(self):
         tenacity_logger = self.get_logger(key="retry", verbose=True, log_level=None)

         @retry(
-            retry=retry_if_exception_type(together.RateLimitError),
+            retry=retry_if_exception_type(together.error.RateLimitError),
             wait=wait_exponential(multiplier=1, min=10, max=60),
             before_sleep=before_sleep_log(tenacity_logger, logging.INFO),
             after=after_log(tenacity_logger, logging.INFO),
             stop=stop_any(lambda _: not self.retry_on_fail), # type: ignore[arg-type]
             reraise=True,
         )
         @retry(
-            retry=retry_if_exception_type(together.ResponseError),
+            retry=retry_if_exception_type(together.error.ResponseError),
             wait=wait_exponential(multiplier=1, min=3, max=300),
             before_sleep=before_sleep_log(tenacity_logger, logging.INFO),
             after=after_log(tenacity_logger, logging.INFO),
             stop=stop_any(lambda _: not self.retry_on_fail), # type: ignore[arg-type]
             reraise=True,
         )
+        @retry(
+            retry=retry_if_exception_type(together.error.APIConnectionError),
+            wait=wait_exponential(multiplier=1, min=3, max=300),
+            before_sleep=before_sleep_log(tenacity_logger, logging.INFO),
+            after=after_log(tenacity_logger, logging.INFO),
+            stop=stop_any(lambda _: not self.retry_on_fail), # type: ignore[arg-type]
+            reraise=True,
+        )
+        @retry(
+            retry=retry_if_exception_type(together.error.ServiceUnavailableError),
+            wait=wait_exponential(multiplier=1, min=3, max=300),
+            before_sleep=before_sleep_log(tenacity_logger, logging.INFO),
+            after=after_log(tenacity_logger, logging.INFO),
@@ -114,7 +130,7 @@ def retry_wrapper(self):
             reraise=True,
         )
         @retry(
-            retry=retry_if_exception_type(together.TogetherException),
+            retry=retry_if_exception_type(together.error.TogetherException),
             wait=wait_exponential(multiplier=1, min=3, max=300),
             before_sleep=before_sleep_log(tenacity_logger, logging.INFO),
             after=after_log(tenacity_logger, logging.INFO),
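The change above moves every exception reference under the SDK's together.error module (the layout used by together >= 1.x) and stacks one @retry decorator per failure mode, so rate limits back off more slowly (min 10s between tries) than other transient errors (min 3s). A minimal runnable sketch of how stacked tenacity decorators compose; ThrottleError and TransientError are hypothetical placeholders, not SDK classes:

    # Hedged sketch: stacked tenacity retries, one backoff policy per
    # exception type. ThrottleError / TransientError are placeholders,
    # not together-SDK or DataDreamer classes.
    from tenacity import (
        retry,
        retry_if_exception_type,
        stop_after_attempt,
        wait_exponential,
    )

    class ThrottleError(Exception): ...
    class TransientError(Exception): ...

    @retry(  # outer decorator: slow backoff for throttling
        retry=retry_if_exception_type(ThrottleError),
        wait=wait_exponential(multiplier=1, min=10, max=60),
        stop=stop_after_attempt(5),
        reraise=True,
    )
    @retry(  # inner decorator: faster backoff for transient faults
        retry=retry_if_exception_type(TransientError),
        wait=wait_exponential(multiplier=1, min=3, max=300),
        stop=stop_after_attempt(5),
        reraise=True,
    )
    def call_api():
        ...  # the real code wraps the Together client call here

Each decorator retries only its own exception type and, with reraise=True, lets everything else propagate to the next decorator out, which is what makes one-policy-per-exception stacking work.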
2 changes: 1 addition & 1 deletion src/tests/embedders/test_embedders.py
@@ -190,7 +190,7 @@ class TestTogetherEmbedder:
     @classmethod
     def setup_class(cls):
         cls.pydantic_version = importlib.metadata.version("pydantic")
-        os.system("pip3 install together==0.2.10")
+        os.system("pip3 install together==1.2.5")
         _reload_pydantic()

     @classmethod
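This test pins the together SDK at class setup and records the ambient pydantic version so it can be restored afterwards. A hedged sketch of that pin-and-restore pattern in isolation; the teardown_class body is an assumed mirror of the visible setup_class, not the repository's code:

    # Sketch only: pin a dependency for one test class, then restore pydantic.
    # teardown_class is an assumption; the diff shows only setup_class.
    import importlib.metadata
    import os

    class TestPinnedSDK:
        @classmethod
        def setup_class(cls):
            # Record the pydantic version before the pin may change it.
            cls.pydantic_version = importlib.metadata.version("pydantic")
            os.system("pip3 install together==1.2.5")

        @classmethod
        def teardown_class(cls):
            # Reinstall the recorded pydantic version (test methods omitted).
            os.system(f"pip3 install pydantic=={cls.pydantic_version}")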
8 changes: 4 additions & 4 deletions src/tests/llms/test_llms.py
@@ -2756,7 +2756,7 @@ class TestTogether:
     @classmethod
     def setup_class(cls):
         cls.pydantic_version = importlib.metadata.version("pydantic")
-        os.system("pip3 install together==0.2.10")
+        os.system("pip3 install together==1.2.5")
         _reload_pydantic()

     @classmethod
@@ -2940,7 +2940,7 @@ class TestMistralAI:
     @classmethod
     def setup_class(cls):
         cls.pydantic_version = importlib.metadata.version("pydantic")
-        os.system("pip3 install mistralai==0.0.8")
+        os.system("pip3 install mistralai==0.4.2")
         _reload_pydantic()

     @classmethod
@@ -3014,10 +3014,10 @@ def chat_mocked(**kwargs):
                     "total_tokens": 0,
                 },
             }
-            return ChatCompletionResponse(**response)
+            return ChatCompletionResponse(**response) # type: ignore[arg-type]

         with create_datadreamer():
-            llm = MistralAI("mistral-tiny")
+            llm = MistralAI("mistral-tiny", api_key="fakeapikey")

             # Mock Complete.create()
             mocker.patch.object(llm.client, "chat", side_effect=chat_mocked)
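Besides the version bumps, the test now passes a fake api_key (presumably because the newer mistralai client requires one at construction) and silences a type error on the mocked ChatCompletionResponse. The mocking itself replaces the client's chat method via pytest-mock. A self-contained sketch of that patch-and-stub pattern; FakeClient and fake_chat are illustrative stand-ins, not mistralai or DataDreamer APIs:

    # Sketch only: patch an instance method so tests never hit the network.
    # FakeClient and fake_chat are hypothetical stand-ins.
    class FakeClient:
        def chat(self, **kwargs):
            raise RuntimeError("real network call; patched out in tests")

    def fake_chat(**kwargs):
        # Canned reply standing in for a live API response.
        return {"choices": [{"message": {"content": "mocked"}}]}

    def test_chat_is_patched(mocker):  # `mocker` is the pytest-mock fixture
        client = FakeClient()
        mocker.patch.object(client, "chat", side_effect=fake_chat)
        out = client.chat(model="any")
        assert out["choices"][0]["message"]["content"] == "mocked"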
