Mirror of https://github.com/OMGeeky/gpt-pilot.git (synced 2026-01-06 03:09:33 +01:00).
Commit: Removed execute_chat_prompt function.
This commit is contained in:
@@ -156,53 +156,3 @@ def generate_messages_from_custom_conversation(role, messages, start_role='user'
|
||||
result.append({"role": "assistant" if start_role == "user" else "user", "content": message})
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def execute_chat_prompt(prompt_file, prompt_data, chat_type, previous_messages=None, function_calls=None):
    """Render a prompt template and run one chat-completion round.

    Builds the message list — either continuing ``previous_messages`` or
    starting from the default system message for ``chat_type`` — calls
    ``create_gpt_chat_completion``, prints/logs the assistant reply, and
    dispatches any function call the model requested via ``function_calls``.

    Returns a ``(response, messages)`` tuple.  Raises ``Exception`` when
    the completion comes back as an empty dict.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    diff; the dispatch call is placed at the outer ``if`` level — confirm
    against the upstream history.
    """
    # Generate a prompt for the completion type.
    rendered_prompt = get_prompt(prompt_file, prompt_data)
    user_message = {"role": "user", "content": rendered_prompt}

    if previous_messages:
        # Continue the supplied conversation instead of starting fresh.
        messages = previous_messages + [user_message]
    else:
        # Fresh conversation: lead with the default system message.
        messages = [
            get_sys_message(find_role_from_step(chat_type)),
            user_message,
        ]

    # TODO remove this once the database is set up properly
    # Coerce every message body to a plain string (lists become newline-joined).
    for msg in messages:
        body = msg['content']
        msg['content'] = '\n'.join(body) if isinstance(body, list) else str(body)
    # TODO END

    response = create_gpt_chat_completion(messages, chat_type, function_calls=function_calls)

    # TODO handle errors from OpenAI
    if response == {}:
        raise Exception("OpenAI API error happened.")

    # TODO we need to specify the response when there is a function called
    # TODO maybe we can have a specific function that creates the GPT response from the function call
    reply_text = response['text'] if 'text' in response else str(response['function_calls']['name'])
    messages.append({"role": "assistant", "content": reply_text})

    print_msg = capitalize_first_word_with_underscores(chat_type)
    print(colored(f"{print_msg}:\n", "green"))
    print(f"{reply_text}\n")
    logger.info(f"{print_msg}: {response}\n")

    if 'function_calls' in response and function_calls is not None:
        if 'send_messages_and_step' in function_calls:
            # Hand the conversation state to the called function.
            response['function_calls']['arguments']['previous_messages'] = messages
            response['function_calls']['arguments']['current_step'] = chat_type
        response, msgs = function_calls['functions'][response['function_calls']['name']](**response['function_calls']['arguments'])
        if msgs is not None:
            messages = msgs
    elif 'text' in response:
        # Plain text reply: unwrap it for the caller.
        response = response['text']

    return response, messages
|
||||
|
||||
Reference in New Issue
Block a user