tuned prompt
llama.py
@@ -13,7 +13,7 @@ register_dummy()
 def initialize_config(inference: Inference) -> Terminal:
 
     # system message at the very beginning of the chat. Will be concatenated with the automatic tool usage descriptions
-    system_prompt = "Hold a casual conversation with the user. Keep responses short at max 5 sentences and on point. Answer using markdown to the user. When providing code examples, avoid comments which provide no additional information."
+    system_prompt = "Hold a casual conversation with the user. Keep responses short at max 5 sentences and on point. Answer using markdown to the user. When providing code examples, avoid comments which provide no additional information. Do not summarize."
     current_date_and_time = datetime.datetime.now().strftime("Current date is %Y-%m-%d and its %H:%M %p right now.")
     append_toolcalls = False
     if append_toolcalls:
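For orientation, a minimal sketch of how the pieces above are presumably combined into the opening system message. The msg helper is inferred from the msg("system", ...) calls later in this file, and the assembly order is an assumption, not something this commit shows:

# Illustrative sketch only -- msg() and the assembly below are assumptions.
import datetime

def msg(role: str, content: str) -> dict:
    # Build a chat message in the usual {"role", "content"} shape.
    return {"role": role, "content": content}

system_prompt = "Hold a casual conversation with the user. Keep responses short at max 5 sentences and on point."
current_date_and_time = datetime.datetime.now().strftime("Current date is %Y-%m-%d and its %H:%M %p right now.")

# The chat would then open with one system message carrying the prompt, the date string,
# and (if append_toolcalls were True) the generated tool usage descriptions.
opening_message = msg("system", " ".join([system_prompt, current_date_and_time]))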
@@ -25,11 +25,11 @@ def initialize_config(inference: Inference) -> Terminal:
 
     # system message for role flip so the model automatically answers for the user
     terminal.roleflip = msg("system", "Keep the conversation going, ask for more information on the subject. Keep messages short at max 1-2 sentences. Do not thank and say goodbye.")
 
     # system messages and user message to bring the model to summarize the entire conversation
     terminal.summarize = msg("system", "Summarize the conversation as a single, cohesive paragraph. Avoid using any bullet points, numbers, or list formatting. Write in plain text with natural sentences that flow together seamlessly.")
     terminal.summarize_user = msg("system", "Can you summarize the conversation?")
 
     # system message to create a conversation title
     terminal.title_prompt = msg("system", "Please create a very short and descriptive title or label for this conversation. Maximum 2-5 words. Use only plain text, avoid numbering, special characters, or unnecessary formatting-focus on clarity and brevity.")
     return terminal
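The Terminal internals are not part of this diff, so the following is only a hypothetical sketch of how the summarize prompts defined above might be applied: swap the summarization instructions in as the system message, append the canned summary request, and ask the model for one response. The chat_history argument and the inference.chat call are assumptions for illustration:

# Hypothetical usage sketch -- only terminal.summarize and terminal.summarize_user
# come from this commit; everything else here is assumed.
def summarize_conversation(inference, terminal, chat_history: list) -> str:
    # Rebuild the message list around the summarization prompts.
    messages = [terminal.summarize] + chat_history + [terminal.summarize_user]
    # One completion over the rebuilt message list yields the summary text.
    return inference.chat(messages)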
@@ -37,7 +37,7 @@ def initialize_config(inference: Inference) -> Terminal:
 
 
 if __name__ == "__main__":
 
     inference = Inference(model_selection.get_model())
     terminal = initialize_config(inference)
     terminal.join()