-
Notifications
You must be signed in to change notification settings - Fork 207
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
update ReAct example for internlm2 (#85)
* update ReAct example for internlm2
* update ReAct example for internlm2
* update base_llm
* rename file
* update readme
* update meta_template
- Loading branch information
1 parent
6b28760
commit c897296
Showing 4 changed files with 83 additions and 41 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,37 +1,27 @@ | ||
# Example: run a ReAct agent backed by InternLM2 with a Python-interpreter
# tool. (Reconstructed from the diff view: the "new" side of the hunk,
# with the table artifacts removed so it is valid Python again.)

# Import the agent framework, its tools, and the HuggingFace LLM wrapper.
from lagent.actions import ActionExecutor, GoogleSearch, PythonInterpreter
from lagent.agents import ReAct
from lagent.llms import HFTransformer
from lagent.llms.meta_template import INTERNLM2_META as META

# Initialize the HFTransformer-based language model; the meta template
# tells lagent how to wrap each chat turn in InternLM2's control tokens.
llm = HFTransformer(path='internlm/internlm2-chat-7b', meta_template=META)

# Initialize the Google Search tool (replace with your real Serper API
# key; this tool is constructed here but not wired into the agent below).
search_tool = GoogleSearch(api_key='Your SERPER_API_KEY')

# Initialize the Python interpreter tool the agent may invoke.
python_interpreter = PythonInterpreter()

# Create the ReAct chatbot and declare the actions it can perform.
chatbot = ReAct(
    llm=llm,  # Provide the language-model instance.
    action_executor=ActionExecutor(actions=[python_interpreter]),
)

# Ask the chatbot a mathematical question in LaTeX format.
# BUG FIX: the original used a non-raw string, so '\f' in '\frac' and
# '\r' in '\right' were interpreted as form-feed / carriage-return
# escapes, corrupting the LaTeX sent to the model. A raw string keeps
# every backslash literal (and yields the same text the doubled
# backslashes elsewhere were producing).
response = chatbot.chat(
    r'若$z=-1+\sqrt{3}i$,则$\frac{z}{{z\overline{z}-1}}=\left(\ \ \right)$')

# Print the chatbot's response.
print(response.response)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,40 @@ | ||
# Chat meta-template for InternLM2: one entry per conversation role,
# giving the begin/end control tokens each message is wrapped in.
# (Cleaned of the ' | ||' diff-view artifacts that made this invalid
# Python; the data itself is unchanged.)
#
# For each role:
#   - begin.with_name: format string used when the message carries a
#     tool name (filled via str.format with `name`),
#   - begin.without_name: used otherwise,
#   - begin.name: maps logical tool roles to their special tokens
#     (presumably consumed by lagent's prompt builder — see the library),
#   - end: terminator appended after every message of that role.
INTERNLM2_META = [
    dict(
        role='system',
        begin=dict(
            with_name='<|im_start|>system name={name}\n',
            without_name='<|im_start|>system\n',
            name={
                'interpreter': '<|interpreter|>',
                'plugin': '<|plugin|>',
            }),
        end='<|im_end|>\n',
    ),
    dict(
        role='user',
        begin=dict(
            with_name='<|im_start|>user name={name}\n',
            without_name='<|im_start|>user\n',
        ),
        end='<|im_end|>\n'),
    dict(
        role='assistant',
        begin=dict(
            with_name='<|im_start|>assistant name={name}\n',
            without_name='<|im_start|>assistant\n',
            name={
                'interpreter': '<|interpreter|>',
                'plugin': '<|plugin|>',
            }),
        end='<|im_end|>\n'),
    dict(
        role='environment',
        begin=dict(
            with_name='<|im_start|>environment name={name}\n',
            without_name='<|im_start|>environment\n',
            name={
                'interpreter': '<|interpreter|>',
                'plugin': '<|plugin|>',
            }),
        end='<|im_end|>\n'),
]