
add tool response tags

Branch: master
Author: Florin Tobler, 6 months ago
Commit: b8c0979a3e
1 changed file: llama.py (4 changed lines)

llama.py
@@ -10,7 +10,7 @@ messages = []
 inference = None
 # systemmessage at the very begin of the chat. Will be concatenated with the automatic tool usage descriptions
-systemmessage = "Hold a casual conversation with the user. Keep responses short at max 3 sentences."
+systemmessage = "Hold a casual conversation with the user. Keep responses short at max 3 sentences. Answer using markdown to the user."
 # system message for role flip so the model automatically answers for the user
 roleflip = {"role": "system", "content": "Keep the conversation going, ask for more information on the subject. Keep messages short at max 1-2 sentences. Do not thank and say goodbye."}
@@ -50,7 +50,7 @@ def append_generate_chat(input_text: str, role="user"):
     tool_result = parse_and_execute_tool_call(out_text, tool_list)
     if tool_result != None:
         # tool call happened
-        # tool_result = "<tool_response>%s</tool_response>" % tool_result
+        tool_result = "<tool_response>%s</tool_response>" % tool_result
         # depending on the chat template the tool response tags must or must not be passed. :(
         append_generate_chat(tool_result, role="tool")
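
For context, the uncommented line in the second hunk wraps the raw tool output in <tool_response> tags before it is fed back to the model with role="tool". Below is a minimal sketch of that flow; the generate() stub and the simplified append_generate_chat() are illustrative placeholders, not the actual implementations in llama.py:

# minimal, self-contained sketch (placeholder generate(), simplified
# append_generate_chat(); both are assumptions, not the real llama.py code)
messages = []

def generate(history):
    # stand-in for the actual model inference call
    return "model output for: " + history[-1]["content"]

def append_generate_chat(input_text, role="user"):
    # append the incoming message, then let the model produce a reply
    messages.append({"role": role, "content": input_text})
    out_text = generate(messages)
    messages.append({"role": "assistant", "content": out_text})
    return out_text

# after a tool call, the raw result is wrapped in <tool_response> tags
# before being passed back with role="tool" (the change this commit makes)
tool_result = "23.5"  # hypothetical result of parse_and_execute_tool_call()
tool_result = "<tool_response>%s</tool_response>" % tool_result
append_generate_chat(tool_result, role="tool")

Whether the tags are needed depends on the model's chat template: some templates add <tool_response> markers around role="tool" messages themselves, others expect the content to arrive pre-wrapped, which is what the trailing comment in the diff alludes to.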
