
Commit

Bump version [release]
AjayP13 committed Mar 15, 2024
1 parent 1c0e567 commit 9d834d5
Showing 4 changed files with 6 additions and 7 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "DataDreamer"
-version = "0.22.0"
+version = "0.23.0"
 description = "Prompt. Generate Synthetic Data. Train & Align Models."
 license = "MIT"
 authors= [
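This hunk only bumps the package version from 0.22.0 to 0.23.0. As a quick sanity check that is not part of the commit (and assumes the installed distribution keeps the name declared under [tool.poetry] above), the new version can be read back at runtime:

from importlib.metadata import version

# Prints "0.23.0" in an environment where this commit is installed; the
# distribution name comes from the pyproject.toml "name" field shown above.
print(version("DataDreamer"))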
2 changes: 1 addition & 1 deletion src/requirements.txt
@@ -26,5 +26,5 @@ accelerate>=0.26.1,<1.0.0
 transformers>=4.37.1,<4.50.0
 ctransformers>=0.2.27,<1.0.0
 Pyro5>=5.15
-litellm==1.29.5
+litellm==1.31.14
 trl==0.7.6
4 changes: 2 additions & 2 deletions src/tests/embedders/test_embedders.py
@@ -163,8 +163,8 @@ def test_run(self, create_datadreamer):
 results = model.run(texts=["A test sentence.", "Another test sentence."])
 assert isinstance(results, list)
 assert len(results[0]) == 1024
-assert results[0][0] == pytest.approx(0.01616307906806469, 0.0001)
-assert results[1][0] == pytest.approx(0.008139288984239101, 0.0001)
+assert results[0][0] == pytest.approx(0.016176335513591766, 0.01)
+assert results[1][0] == pytest.approx(0.008149503730237484, 0.01)

 # Test truncate
 with pytest.raises(ValueError):
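The second positional argument to pytest.approx is the relative tolerance, so these assertions are loosened from 0.01% to 1% of the expected value while the expected embedding components themselves are refreshed. A standalone sketch of that semantics, independent of this repository:

import pytest

expected = 0.016176335513591766

# pytest.approx(expected, rel) passes when abs(actual - expected) is within
# rel * abs(expected), plus a negligible default absolute tolerance of 1e-12.
assert 0.0163 == pytest.approx(expected, 0.01)        # ~0.8% off -> passes
assert not (0.0165 == pytest.approx(expected, 0.01))  # ~2% off -> fails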
5 changes: 2 additions & 3 deletions src/tests/llms/test_llms.py
@@ -2178,10 +2178,9 @@ def test_count_tokens(self, create_datadreamer):
 def test_get_max_context_length(self, create_datadreamer):
     with create_datadreamer():
         llm = LiteLLM("gpt-3.5-turbo-instruct")
-        # LiteLLM gets this wrong, it should be 4096
-        assert llm.get_max_context_length(max_new_tokens=0) == 8192
+        assert llm.get_max_context_length(max_new_tokens=0) == 4096
         llm = LiteLLM("gpt-3.5-turbo")
-        assert llm.get_max_context_length(max_new_tokens=0) == 4097
+        assert llm.get_max_context_length(max_new_tokens=0) == 4096

 @pytest.mark.skipif(
     "OPENAI_API_KEY" not in os.environ, reason="requires OpenAI API key"
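With the litellm pin bumped above, both GPT-3.5 variants are now expected to report a 4,096-token context window, so the compensating comment and the old 8192/4097 expectations can be dropped. A hedged usage sketch of the method under test; the import paths and output directory are assumptions based on DataDreamer's public API rather than anything shown in this diff:

from datadreamer import DataDreamer
from datadreamer.llms import LiteLLM

# get_max_context_length(max_new_tokens=n) reports the context window minus
# the tokens reserved for generation, so with max_new_tokens=0 it should
# return the full window (4096 for both models per the updated tests).
with DataDreamer("./output"):
    for model_name in ("gpt-3.5-turbo-instruct", "gpt-3.5-turbo"):
        llm = LiteLLM(model_name)
        print(model_name, llm.get_max_context_length(max_new_tokens=0))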
