Mirror of https://github.com/OMGeeky/gpt-pilot.git
Synced 2026-01-16 23:56:19 +01:00
Commit: Hardcoded fix
@@ -22,12 +22,14 @@ class AgentConvo:

        self.messages.append(get_sys_message(self.agent.role))

    def send_message(self, prompt_path=None, prompt_data=None, function_calls=None):
        # craft message
        if prompt_path is not None and prompt_data is not None:
            prompt = get_prompt(prompt_path, prompt_data)
            self.messages.append({"role": "user", "content": prompt})

        if function_calls is not None and 'function_calls' in function_calls:
            self.messages[-1]['content'] += '\nMAKE SURE THAT YOU RESPOND WITH A CORRECT JSON FORMAT!!!'

        # check if we already have the LLM response saved
        self.agent.project.llm_req_num += 1
        development_step = get_development_step_from_hash_id(self.agent.project, prompt_path, prompt_data, self.agent.project.llm_req_num)
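For readers skimming the diff, the change above boils down to one pattern: when a function-call schema is attached to the request, a hardcoded reminder is appended to the last user message to nudge the model toward valid JSON. Below is a minimal, self-contained sketch of that pattern; the MiniConvo class, the JSON_REMINDER constant, and the example call are illustrative stand-ins, not the project's actual code.

JSON_REMINDER = '\nMAKE SURE THAT YOU RESPOND WITH A CORRECT JSON FORMAT!!!'

class MiniConvo:
    """Illustrative stand-in for AgentConvo; tracks only the message list."""

    def __init__(self):
        self.messages = []

    def send_message(self, prompt=None, function_calls=None):
        # Append the user prompt, mirroring the {"role": "user", "content": ...} shape in the diff.
        if prompt is not None:
            self.messages.append({"role": "user", "content": prompt})

        # The "hardcoded fix": only nudge the model when a function-call spec is present.
        if function_calls is not None and 'function_calls' in function_calls:
            self.messages[-1]['content'] += JSON_REMINDER

        return self.messages

convo = MiniConvo()
convo.send_message("List the project files.", function_calls={'function_calls': [{'name': 'list_files'}]})
print(convo.messages[-1]['content'])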
@@ -130,3 +132,6 @@ class AgentConvo:

            content = file.read()
        process = subprocess.Popen('pbcopy', stdin=subprocess.PIPE)
        process.communicate(content.replace('{{messages}}', str(self.messages)).encode('utf-8'))

    def remove_last_x_messages(self, x):
        self.messages = self.messages[:-x]
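The clipboard step above pipes the rendered conversation into pbcopy, which exists only on macOS; on Linux you would substitute a tool such as xclip. A hedged sketch of the same idea, with a hypothetical copy_conversation helper and an inline template in place of the project's template file:

import subprocess

def copy_conversation(template: str, messages: list) -> None:
    # Render the template the same way the diff does, then pipe the result to the clipboard.
    rendered = template.replace('{{messages}}', str(messages))
    process = subprocess.Popen('pbcopy', stdin=subprocess.PIPE)  # macOS-only clipboard tool
    process.communicate(rendered.encode('utf-8'))

copy_conversation('const messages = {{messages}};', [{"role": "user", "content": "hi"}])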
@@ -169,9 +169,11 @@ def stream_gpt_completion(data, req_type):

                if 'name' in json_line['function_call']:
                    function_calls['name'] = json_line['function_call']['name']
                    print(f'Function call: {function_calls["name"]}')

                if 'arguments' in json_line['function_call']:
                    function_calls['arguments'] += json_line['function_call']['arguments']
                    print(json_line['function_call']['arguments'], end='', flush=True)

            if 'content' in json_line:
                content = json_line.get('content')
                if content:
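The streaming branch above follows the usual OpenAI-style pattern: a function call arrives in fragments, so the name is taken once and the 'arguments' string is concatenated chunk by chunk, while plain 'content' deltas are buffered separately. Below is a minimal sketch of that accumulation using hand-written deltas in place of a real stream; stream_gpt_completion itself is not reproduced here.

import json

# Hand-written stand-ins for the parsed JSON lines a real stream would yield.
deltas = [
    {'function_call': {'name': 'create_file', 'arguments': ''}},
    {'function_call': {'arguments': '{"path": "app'}},
    {'function_call': {'arguments': '.py"}'}},
    {'content': None},
]

function_calls = {'name': '', 'arguments': ''}
content_buffer = ''

for json_line in deltas:
    if 'function_call' in json_line:
        if 'name' in json_line['function_call']:
            function_calls['name'] = json_line['function_call']['name']
        if 'arguments' in json_line['function_call']:
            function_calls['arguments'] += json_line['function_call']['arguments']
    if 'content' in json_line:
        content = json_line.get('content')
        if content:
            content_buffer += content

# Once the stream ends, the concatenated arguments form a complete JSON document.
print(function_calls['name'], json.loads(function_calls['arguments']))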