update ReAct example for internlm2 (#85)
* update ReAct example for internlm2

* update ReAct example for internlm2

* update base_llm

* rename file

* update readme

* update meta_template
liujiangning30 authored Jan 23, 2024
1 parent 6b28760 commit c897296
Showing 4 changed files with 83 additions and 41 deletions.
20 changes: 15 additions & 5 deletions README.md
@@ -121,24 +121,34 @@ from lagent.agents import ReAct
from lagent.actions import ActionExecutor, GoogleSearch, PythonInterpreter
from lagent.llms import HFTransformer

# Initialize the HFTransformer-based Language Model (llm) and provide the model name.
llm = HFTransformer('internlm/internlm-chat-7b-v1_1')
from lagent.llms.meta_template import INTERNLM2_META as META

# Initialize the HFTransformer-based Language Model (llm) and
# provide the model name.
llm = HFTransformer(
    path='internlm/internlm2-chat-7b',
    meta_template=META
)

# Initialize the Google Search tool and provide your API key.
search_tool = GoogleSearch(api_key='Your SERPER_API_KEY')
search_tool = GoogleSearch(
    api_key='Your SERPER_API_KEY')

# Initialize the Python Interpreter tool.
python_interpreter = PythonInterpreter()

# Create a chatbot by configuring the ReAct agent.
# Specify the actions the chatbot can perform.
chatbot = ReAct(
    llm=llm, # Provide the Language Model instance.
    action_executor=ActionExecutor(
        actions=[search_tool, python_interpreter] # Specify the actions the chatbot can perform.
        actions=[python_interpreter]
    ),
)
# Ask the chatbot a mathematical question in LaTeX format.
response = chatbot.chat('若$z=-1+\sqrt{3}i$,则$\frac{z}{{z\overline{z}-1}}=\left(\ \ \right)$')
response = chatbot.chat(
    '若$z=-1+\\sqrt{3}i$,则$\\frac{z}{{z\\overline{z}-1}}=\\left(\\ \\ \\right)$'
)

# Print the chatbot's response.
print(response.response) # Output the response generated by the chatbot.
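
Note (not part of this commit): the README still constructs search_tool but no longer passes it to the agent. If web search is wanted alongside the interpreter, the tool list from the previous version of this example can be restored, for instance:

chatbot_with_search = ReAct(
    llm=llm,  # Provide the Language Model instance.
    action_executor=ActionExecutor(
        actions=[search_tool, python_interpreter]  # Enable both tools.
    ),
)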
56 changes: 23 additions & 33 deletions examples/hf_react_example.py
@@ -1,37 +1,27 @@
from lagent.actions.action_executor import ActionExecutor
from lagent.actions.python_interpreter import PythonInterpreter
from lagent.agents.react import ReAct
from lagent.llms.huggingface import HFTransformer
# Import necessary modules and classes from the 'lagent' library.
from lagent.actions import ActionExecutor, GoogleSearch, PythonInterpreter
from lagent.agents import ReAct
from lagent.llms import HFTransformer
from lagent.llms.meta_template import INTERNLM2_META as META

model = HFTransformer(
    path='internlm/internlm-chat-7b-v1_1',
    meta_template=[
        dict(role='system', begin='<|System|>:', end='<TOKENS_UNUSED_2>\n'),
        dict(role='user', begin='<|User|>:', end='<eoh>\n'),
        dict(role='assistant', begin='<|Bot|>:', end='<eoa>\n', generate=True)
    ],
)

chatbot = ReAct(
    llm=model,
    action_executor=ActionExecutor(actions=[PythonInterpreter()]),
)
# Initialize the HFTransformer-based Language Model (llm) and
# provide the model name.
llm = HFTransformer(path='internlm/internlm2-chat-7b', meta_template=META)

# Initialize the Google Search tool and provide your API key.
search_tool = GoogleSearch(api_key='Your SERPER_API_KEY')

def input_prompt():
    print('\ndouble enter to end input >>> ', end='')
    sentinel = ''  # ends when this string is seen
    return '\n'.join(iter(input, sentinel))
# Initialize the Python Interpreter tool.
python_interpreter = PythonInterpreter()


while True:
    try:
        prompt = input_prompt()
    except UnicodeDecodeError:
        print('UnicodeDecodeError')
        continue
    if prompt == 'exit':
        exit(0)

    agent_return = chatbot.chat(prompt)
    print(agent_return.response)
# Create a chatbot by configuring the ReAct agent.
# Specify the actions the chatbot can perform.
chatbot = ReAct(
    llm=llm, # Provide the Language Model instance.
    action_executor=ActionExecutor(actions=[python_interpreter]),
)
# Ask the chatbot a mathematical question in LaTeX format.
response = chatbot.chat(
    '若$z=-1+\\sqrt{3}i$,则$\\frac{z}{{z\\overline{z}-1}}=\\left(\\ \\ \\right)$')
# Print the chatbot's response.
print(response.response) # Output the response generated by the chatbot.
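
Note (not part of this commit): the interactive loop deleted from this example can still be layered on top of the new chatbot object if a REPL-style session is preferred; a minimal sketch assembled from the removed lines:

def input_prompt():
    print('\ndouble enter to end input >>> ', end='')
    sentinel = ''  # input ends when an empty line is entered
    return '\n'.join(iter(input, sentinel))


while True:
    try:
        prompt = input_prompt()
    except UnicodeDecodeError:
        print('UnicodeDecodeError')
        continue
    if prompt == 'exit':
        exit(0)
    agent_return = chatbot.chat(prompt)
    print(agent_return.response)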
8 changes: 5 additions & 3 deletions lagent/llms/huggingface.py
@@ -123,9 +123,11 @@ def generate_from_template(self, templates, max_out_len: int, **kwargs):
"""
inputs = self.parse_template(templates)
response = self.generate(inputs, max_out_len=max_out_len, **kwargs)
return response.replace(
self.template_parser.roles['assistant']['end'].strip(),
'').strip()
end_token = self.template_parser.meta_template[0]['end'].strip()
# return response.replace(
# self.template_parser.roles['assistant']['end'].strip(),
# '').strip()
return response.split(end_token.strip())[0]


class HFTransformerCasualLM(HFTransformer):
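
Note (not part of this commit): the new return statement reads the end token from the first meta_template entry (the system role); in INTERNLM2_META every role ends with the same '<|im_end|>' marker, so it also matches the assistant turn. A minimal sketch of the behavioural difference between the old replace-based cleanup and the new split-based truncation:

end_token = '<|im_end|>'  # assistant end marker from INTERNLM2_META, stripped
raw = 'The answer is 42.<|im_end|>\n<|im_start|>user\nnext turn...'

# Old behaviour: the end token is removed, but text generated after it survives.
print(raw.replace(end_token, '').strip())
# New behaviour: everything after the first end token is discarded.
print(raw.split(end_token)[0])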
40 changes: 40 additions & 0 deletions lagent/llms/meta_template.py
@@ -0,0 +1,40 @@
INTERNLM2_META = [
    dict(
        role='system',
        begin=dict(
            with_name='<|im_start|>system name={name}\n',
            without_name='<|im_start|>system\n',
            name={
                'interpreter': '<|interpreter|>',
                'plugin': '<|plugin|>',
            }),
        end='<|im_end|>\n',
    ),
    dict(
        role='user',
        begin=dict(
            with_name='<|im_start|>user name={name}\n',
            without_name='<|im_start|>user\n',
        ),
        end='<|im_end|>\n'),
    dict(
        role='assistant',
        begin=dict(
            with_name='<|im_start|>assistant name={name}\n',
            without_name='<|im_start|>assistant\n',
            name={
                'interpreter': '<|interpreter|>',
                'plugin': '<|plugin|>',
            }),
        end='<|im_end|>\n'),
    dict(
        role='environment',
        begin=dict(
            with_name='<|im_start|>environment name={name}\n',
            without_name='<|im_start|>environment\n',
            name={
                'interpreter': '<|interpreter|>',
                'plugin': '<|plugin|>',
            }),
        end='<|im_end|>\n'),
]
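
Note (not part of this commit): lagent's template parser consumes this structure internally; the sketch below only makes the rendered chat format visible, using the without_name variants for a plain system/user exchange and leaving the prompt open at the assistant turn for generation.

from lagent.llms.meta_template import INTERNLM2_META

messages = [
    dict(role='system', content='You are a helpful assistant.'),
    dict(role='user', content='What is 2 + 2?'),
]

roles = {item['role']: item for item in INTERNLM2_META}

prompt = ''
for msg in messages:
    cfg = roles[msg['role']]
    prompt += cfg['begin']['without_name'] + msg['content'] + cfg['end']
prompt += roles['assistant']['begin']['without_name']

# Expected rendering:
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# What is 2 + 2?<|im_end|>
# <|im_start|>assistant
print(prompt)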
