Replies: 1 comment 3 replies
-
Hey @Sagar0070! I'm here to assist you with any bugs, questions, or contributions you have in mind. Let's tackle this together! Yes, you can get the def import_tiktoken() -> Any:
"""Import tiktoken for counting tokens for OpenAI models."""
return guard_import("tiktoken")
def get_num_tokens(string: str, openai_model_name: str) -> int:
"""Calculate num tokens for OpenAI with tiktoken package.
Official documentation: https://github.com/openai/openai-cookbook/blob/main
/examples/How_to_count_tokens_with_tiktoken.ipynb
"""
tiktoken = import_tiktoken()
encoding = tiktoken.encoding_for_model(openai_model_name)
num_tokens = len(encoding.encode(string))
return num_tokens Additionally, the def get_token_ids(self, text: str) -> List[int]:
"""Get the tokens present in the text with tiktoken package."""
if sys.version_info[1] <= 7:
return super().get_token_ids(text)
_, encoding_model = self._get_encoding_model()
return encoding_model.encode(text)
def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
"""Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package."""
if sys.version_info[1] <= 7:
return super().get_num_tokens_from_messages(messages)
model, encoding = self._get_encoding_model()
if model.startswith("gpt-3.5-turbo-0301"):
tokens_per_message = 4
tokens_per_name = -1
elif model.startswith("gpt-3.5-turbo") or model.startswith("gpt-4"):
tokens_per_message = 3
tokens_per_name = 1
else:
raise NotImplementedError(
f"get_num_tokens_from_messages() is not presently implemented "
f"for model {model}."
)
num_tokens = 0
messages_dict = [convert_message_to_dict(m) for m in messages]
for message in messages_dict:
num_tokens += tokens_per_message
for key, value in message.items():
num_tokens += len(encoding.encode(str(value)))
if key == "name":
num_tokens += tokens_per_name
num_tokens += 3
return num_tokens These methods use the |
Beta Was this translation helpful? Give feedback.
3 replies
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
-
Checked other resources
Commit to Help
Example Code
Description
Say I created an agent_executor using the code below:
# Build an OpenAI tools agent and wrap it in an AgentExecutor that runs the
# tool-calling loop with conversation memory and hard run limits.
agent = create_openai_tools_agent(chat_llm, tools, prompt)
agent_executor = AgentExecutor(
    # return_intermediate_steps=True surfaces each (action, observation) pair
    # alongside the final answer.
    # NOTE(review): early_stopping_method="generate" is not supported by all
    # agent types — AgentExecutor may raise "unsupported early_stopping_method"
    # for tools agents; confirm whether "force" is intended here.
agent=agent, tools=tools, verbose=False, memory=memory, return_intermediate_steps=True, early_stopping_method="generate",
    # Loop caps sourced from app config; presumably an int and a float of
    # seconds respectively — TODO confirm against CUSTOMER_BOT_CONFIG.
max_iterations=CUSTOMER_BOT_CONFIG["max_iterations"],
max_execution_time=CUSTOMER_BOT_CONFIG["max_execution_time"],
)
System Info
langchain==0.1.3
langchain-community==0.0.15
langchain-core==0.2.0
langchain-experimental==0.0.49
langchain-openai==0.0.3
Beta Was this translation helpful? Give feedback.
All reactions