bump agent strategy version to 0.0.5 (#188)
Co-authored-by: Novice Lee <novicelee@NoviPro.local>
Nov1c444 and Novice Lee authored Feb 10, 2025
1 parent 9bf2594 commit 51b33cf
Showing 4 changed files with 87 additions and 66 deletions.
agent-strategies/cot_agent/manifest.yaml (2 changes: 1 addition & 1 deletion)
@@ -1,4 +1,4 @@
-version: 0.0.5
+version: 0.0.6
 type: plugin
 author: "langgenius"
 name: "agent"
agent-strategies/cot_agent/requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
-dify_plugin~=0.0.1b62
+dify_plugin~=0.0.1b63
agent-strategies/cot_agent/strategies/ReAct.py (73 changes: 41 additions & 32 deletions)
@@ -13,7 +13,7 @@
 ToolPromptMessage,
 UserPromptMessage,
 )
-from dify_plugin.entities.tool import ToolInvokeMessage, ToolProviderType
+from dify_plugin.entities.tool import LogMetadata, ToolInvokeMessage, ToolProviderType
 from dify_plugin.interfaces.agent import (
 AgentModelConfig,
 AgentScratchpadUnit,
@@ -130,7 +130,7 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 label=f"ROUND {iteration_step}",
 data={},
 metadata={
-"started_at": round_started_at,
+LogMetadata.STARTED_AT: round_started_at,
 },
 status=ToolInvokeMessage.LogMessage.LogStatus.START,
 )
@@ -171,7 +171,10 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 model_log = self.create_log_message(
 label=f"{model.model} Thought",
 data={},
-metadata={"start_at": model_started_at, "provider": model.provider},
+metadata={
+LogMetadata.STARTED_AT: model_started_at,
+LogMetadata.PROVIDER: model.provider,
+},
 parent=round_log,
 status=ToolInvokeMessage.LogMessage.LogStatus.START,
 )
@@ -183,20 +186,19 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
# detect action
assert scratchpad.agent_response is not None
scratchpad.agent_response += json.dumps(chunk.model_dump())

scratchpad.action_str = json.dumps(chunk.model_dump())
scratchpad.action = action
else:
scratchpad.agent_response = scratchpad.agent_response or ""
scratchpad.thought = scratchpad.thought or ""
scratchpad.agent_response += chunk
scratchpad.thought += chunk

scratchpad.thought = (
scratchpad.thought.strip()
if scratchpad.thought
else "I am thinking about how to help you"
)

agent_scratchpad.append(scratchpad)

# get llm usage
@@ -206,25 +208,27 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 else:
 usage_dict["usage"] = LLMUsage.empty_usage()
 
-if not scratchpad.is_final():
-pass
+action = (
+scratchpad.action.to_dict()
+if scratchpad.action
+else {"action": scratchpad.agent_response}
+)
+
 yield self.finish_log_message(
 log=model_log,
-data={
-"output": scratchpad.agent_response,
-},
+data={"thought": scratchpad.thought, **action},
 metadata={
-"started_at": model_started_at,
-"finished_at": time.perf_counter(),
-"elapsed_time": time.perf_counter() - model_started_at,
-"provider": model.provider,
-"total_price": usage_dict["usage"].total_price
+LogMetadata.STARTED_AT: model_started_at,
+LogMetadata.FINISHED_AT: time.perf_counter(),
+LogMetadata.ELAPSED_TIME: time.perf_counter() - model_started_at,
+LogMetadata.PROVIDER: model.provider,
+LogMetadata.TOTAL_PRICE: usage_dict["usage"].total_price
 if usage_dict["usage"]
 else 0,
-"currency": usage_dict["usage"].currency
+LogMetadata.CURRENCY: usage_dict["usage"].currency
 if usage_dict["usage"]
 else "",
-"total_tokens": usage_dict["usage"].total_tokens
+LogMetadata.TOTAL_TOKENS: usage_dict["usage"].total_tokens
 if usage_dict["usage"]
 else 0,
 },
@@ -252,8 +256,10 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 label=f"CALL {tool_name}",
 data={},
 metadata={
-"started_at": time.perf_counter(),
-"provider": tool_instances[tool_name].identity.provider
+LogMetadata.STARTED_AT: time.perf_counter(),
+LogMetadata.PROVIDER: tool_instances[
+tool_name
+].identity.provider
 if tool_instances.get(tool_name)
 else "",
 },
Expand All @@ -275,12 +281,15 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
"meta": tool_invoke_meta.to_dict(),
},
metadata={
"started_at": tool_call_started_at,
"provider": tool_instances[tool_name].identity.provider
LogMetadata.STARTED_AT: tool_call_started_at,
LogMetadata.PROVIDER: tool_instances[
tool_name
].identity.provider
if tool_instances.get(tool_name)
else "",
"finished_at": time.perf_counter(),
"elapsed_time": time.perf_counter() - tool_call_started_at,
LogMetadata.FINISHED_AT: time.perf_counter(),
LogMetadata.ELAPSED_TIME: time.perf_counter()
- tool_call_started_at,
},
)

@@ -297,16 +306,16 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 },
 },
 metadata={
-"started_at": round_started_at,
-"finished_at": time.perf_counter(),
-"elapsed_time": time.perf_counter() - round_started_at,
-"total_price": usage_dict["usage"].total_price
+LogMetadata.STARTED_AT: round_started_at,
+LogMetadata.FINISHED_AT: time.perf_counter(),
+LogMetadata.ELAPSED_TIME: time.perf_counter() - round_started_at,
+LogMetadata.TOTAL_PRICE: usage_dict["usage"].total_price
 if usage_dict["usage"]
 else 0,
-"currency": usage_dict["usage"].currency
+LogMetadata.CURRENCY: usage_dict["usage"].currency
 if usage_dict["usage"]
 else "",
-"total_tokens": usage_dict["usage"].total_tokens
+LogMetadata.TOTAL_TOKENS: usage_dict["usage"].total_tokens
 if usage_dict["usage"]
 else 0,
 },
@@ -317,13 +326,13 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 yield self.create_json_message(
 {
 "execution_metadata": {
-"total_price": llm_usage["usage"].total_price
+LogMetadata.TOTAL_PRICE: llm_usage["usage"].total_price
 if llm_usage["usage"] is not None
 else 0,
-"currency": llm_usage["usage"].currency
+LogMetadata.CURRENCY: llm_usage["usage"].currency
 if llm_usage["usage"] is not None
 else "",
-"total_tokens": llm_usage["usage"].total_tokens
+LogMetadata.TOTAL_TOKENS: llm_usage["usage"].total_tokens
 if llm_usage["usage"] is not None
 else 0,
 }
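The recurring change in ReAct.py, and in function_calling.py below, is that log metadata keys move from ad-hoc strings such as "started_at" (and the mismatched "start_at" used for the model log) to the LogMetadata constants now imported from dify_plugin.entities.tool. A minimal sketch of the resulting metadata shape, using only the constants that appear in the hunks above; the usage variable is a hypothetical stand-in for the LLMUsage value the strategy accumulates:

import time

from dify_plugin.entities.tool import LogMetadata

round_started_at = time.perf_counter()
usage = None  # stand-in for the accumulated LLMUsage; may be None before the first model call

# ... run one agent round here ...

round_metadata = {
    LogMetadata.STARTED_AT: round_started_at,
    LogMetadata.FINISHED_AT: time.perf_counter(),
    LogMetadata.ELAPSED_TIME: time.perf_counter() - round_started_at,
    LogMetadata.TOTAL_PRICE: usage.total_price if usage else 0,
    LogMetadata.CURRENCY: usage.currency if usage else "",
    LogMetadata.TOTAL_TOKENS: usage.total_tokens if usage else 0,
}

Keying every strategy's logs off the same constants avoids the "start_at"/"started_at" inconsistency visible in the old model-log hunk, so the host application can aggregate timing and usage fields without guessing key spellings.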
agent-strategies/cot_agent/strategies/function_calling.py (76 changes: 44 additions & 32 deletions)
@@ -15,11 +15,12 @@
 AssistantPromptMessage,
 PromptMessage,
 PromptMessageContentType,
+PromptMessageRole,
 SystemPromptMessage,
 ToolPromptMessage,
 UserPromptMessage,
 )
-from dify_plugin.entities.tool import ToolInvokeMessage, ToolProviderType
+from dify_plugin.entities.tool import LogMetadata, ToolInvokeMessage, ToolProviderType
 from dify_plugin.interfaces.agent import AgentModelConfig, AgentStrategy, ToolEntity
 from pydantic import BaseModel, Field
 
@@ -76,7 +77,9 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 query = fc_params.query
 self.query = query
 instruction = fc_params.instruction
-init_prompt_messages = [SystemPromptMessage(content=instruction)]
+init_prompt_messages = [
+PromptMessage(role=PromptMessageRole.SYSTEM, content=instruction)
+]
 tools = fc_params.tools
 tool_instances = {tool.identity.name: tool for tool in tools} if tools else {}
 model = fc_params.model
@@ -103,7 +106,7 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 label=f"ROUND {iteration_step}",
 data={},
 metadata={
-"started_at": round_started_at,
+LogMetadata.STARTED_AT: round_started_at,
 },
 status=ToolInvokeMessage.LogMessage.LogStatus.START,
 )
@@ -126,7 +129,10 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 model_log = self.create_log_message(
 label=f"{model.model} Thought",
 data={},
-metadata={"start_at": model_started_at, "provider": model.provider},
+metadata={
+LogMetadata.STARTED_AT: model_started_at,
+LogMetadata.PROVIDER: model.provider,
+},
 parent=round_log,
 status=ToolInvokeMessage.LogMessage.LogStatus.START,
 )
Expand All @@ -153,10 +159,7 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
current_llm_usage = None

if isinstance(chunks, Generator):
is_first_chunk = True
for chunk in chunks:
if is_first_chunk:
is_first_chunk = False
# check if there is any tool call
if self.check_tool_calls(chunk):
function_call_state = True
@@ -239,18 +242,22 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 data={
 "output": response,
 "tool_name": tool_call_names,
-"tool_input": tool_call_inputs,
+"tool_input": {
+tool_call[1]: tool_call[2] for tool_call in tool_calls
+},
 },
 metadata={
-"started_at": model_started_at,
-"finished_at": time.perf_counter(),
-"elapsed_time": time.perf_counter() - model_started_at,
-"provider": model.provider,
-"total_price": current_llm_usage.total_price
+LogMetadata.STARTED_AT: model_started_at,
+LogMetadata.FINISHED_AT: time.perf_counter(),
+LogMetadata.ELAPSED_TIME: time.perf_counter() - model_started_at,
+LogMetadata.PROVIDER: model.provider,
+LogMetadata.TOTAL_PRICE: current_llm_usage.total_price
 if current_llm_usage
 else 0,
-"currency": current_llm_usage.currency if current_llm_usage else "",
-"total_tokens": current_llm_usage.total_tokens
+LogMetadata.CURRENCY: current_llm_usage.currency
+if current_llm_usage
+else "",
+LogMetadata.TOTAL_TOKENS: current_llm_usage.total_tokens
 if current_llm_usage
 else 0,
 },
@@ -284,8 +291,8 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 label=f"CALL {tool_call_name}",
 data={},
 metadata={
-"started_at": time.perf_counter(),
-"provider": tool_instance.identity.provider,
+LogMetadata.STARTED_AT: time.perf_counter(),
+LogMetadata.PROVIDER: tool_instance.identity.provider,
 },
 parent=round_log,
 status=ToolInvokeMessage.LogMessage.LogStatus.START,
@@ -340,12 +347,14 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 result += f"tool response: {text}."
 else:
 result += f"tool response: {response.message!r}."
-tool_invoke_meta = ToolInvokeMeta.error_instance("")
 tool_response = {
 "tool_call_id": tool_call_id,
 "tool_call_name": tool_call_name,
+"tool_call_input": {
+**tool_instance.runtime_parameters,
+**tool_call_args,
+},
 "tool_response": result,
-"meta": tool_invoke_meta.to_dict(),
 }
 
 yield self.finish_log_message(
@@ -354,10 +363,11 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 data={
 "output": tool_response,
 },
 metadata={
-"started_at": tool_call_started_at,
-"provider": tool_instance.identity.provider,
-"finished_at": time.perf_counter(),
-"elapsed_time": time.perf_counter() - tool_call_started_at,
+LogMetadata.STARTED_AT: tool_call_started_at,
+LogMetadata.PROVIDER: tool_instance.identity.provider,
+LogMetadata.FINISHED_AT: time.perf_counter(),
+LogMetadata.ELAPSED_TIME: time.perf_counter()
+- tool_call_started_at,
 },
 )
Expand All @@ -384,14 +394,16 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
},
},
metadata={
"started_at": round_started_at,
"finished_at": time.perf_counter(),
"elapsed_time": time.perf_counter() - round_started_at,
"total_price": current_llm_usage.total_price
LogMetadata.STARTED_AT: round_started_at,
LogMetadata.FINISHED_AT: time.perf_counter(),
LogMetadata.ELAPSED_TIME: time.perf_counter() - round_started_at,
LogMetadata.TOTAL_PRICE: current_llm_usage.total_price
if current_llm_usage
else 0,
"currency": current_llm_usage.currency if current_llm_usage else "",
"total_tokens": current_llm_usage.total_tokens
LogMetadata.CURRENCY: current_llm_usage.currency
if current_llm_usage
else "",
LogMetadata.TOTAL_PRICE: current_llm_usage.total_tokens
if current_llm_usage
else 0,
},
@@ -401,13 +413,13 @@ def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
 yield self.create_json_message(
 {
 "execution_metadata": {
-"total_price": llm_usage["usage"].total_price
+LogMetadata.TOTAL_PRICE: llm_usage["usage"].total_price
 if llm_usage["usage"] is not None
 else 0,
-"currency": llm_usage["usage"].currency
+LogMetadata.CURRENCY: llm_usage["usage"].currency
 if llm_usage["usage"] is not None
 else "",
-"total_tokens": llm_usage["usage"].total_tokens
+LogMetadata.TOTAL_TOKENS: llm_usage["usage"].total_tokens
 if llm_usage["usage"] is not None
 else 0,
 }
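Beyond the LogMetadata switch, function_calling.py now builds the initial system prompt as a plain PromptMessage with an explicit PromptMessageRole.SYSTEM instead of a dedicated SystemPromptMessage, and records each tool call's resolved input (runtime parameters merged with the model-supplied arguments) next to its response. A small sketch of the prompt change; the module path in the import below is an assumption, since the hunk only shows the imported names, not the module they come from:

# Assumed import path for the prompt entities named in the import hunk above.
from dify_plugin.entities.model.message import PromptMessage, PromptMessageRole

instruction = "You are a helpful agent."  # hypothetical instruction text for illustration

# Before this commit: init_prompt_messages = [SystemPromptMessage(content=instruction)]
# After this commit:
init_prompt_messages = [
    PromptMessage(role=PromptMessageRole.SYSTEM, content=instruction)
]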