function_call-style JSON response from gpt-4, gpt-3_5, codellama, palm-2-chat-bison
@@ -18,25 +18,33 @@ def add_function_calls_to_request(gpt_data, function_calls: FunctionCallSet | None):
     if function_calls is None:
         return

-    if gpt_data['model'] == 'gpt-4':
-        gpt_data['functions'] = function_calls['definitions']
-        if len(function_calls['definitions']) > 1:
-            gpt_data['function_call'] = 'auto'
-        else:
-            gpt_data['function_call'] = {'name': function_calls['definitions'][0]['name']}
-        return
+    model: str = gpt_data['model']
+    is_llama = 'llama' in model
+
+    # if model == 'gpt-4':
+    #     gpt_data['functions'] = function_calls['definitions']
+    #     if len(function_calls['definitions']) > 1:
+    #         gpt_data['function_call'] = 'auto'
+    #     else:
+    #         gpt_data['function_call'] = {'name': function_calls['definitions'][0]['name']}
+    #     return

     # prompter = CompletionModelPrompter()
     # prompter = InstructModelPrompter()
-    prompter = LlamaInstructPrompter()
+    prompter = JsonPrompter(is_llama)

     if len(function_calls['definitions']) > 1:
         function_call = None
     else:
         function_call = function_calls['definitions'][0]['name']

+    role = 'user' if '/' in model else 'system'
+    # role = 'user'
+    # role = 'system'
+    # is_llama = True
+
     gpt_data['messages'].append({
-        'role': 'user',
+        'role': role,
         'content': prompter.prompt('', function_calls['definitions'], function_call)
     })
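In other words, instead of relying on the OpenAI `functions` API, the function schema is now rendered into the prompt itself and appended as an ordinary message. A rough usage sketch, assuming a FunctionCallSet-shaped dict (the schema contents and model name below are illustrative, not from the repo):

    # Hypothetical single-function set in the FunctionCallSet shape used above.
    function_calls = {
        'definitions': [{
            'name': 'create_file',
            'description': 'Create a file with the given contents.',
            'parameters': {
                'type': 'object',
                'properties': {
                    'path': {'type': 'string'},
                    'contents': {'type': 'string'},
                },
            },
        }],
    }

    gpt_data = {'model': 'codellama/CodeLlama-34b-Instruct-hf', 'messages': []}
    add_function_calls_to_request(gpt_data, function_calls)

    # One definition, so function_call is pinned to 'create_file'; the '/' in the
    # model name makes the appended message use role 'user' rather than 'system'.
    assert gpt_data['messages'][0]['role'] == 'user'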
@@ -54,7 +62,7 @@ def parse_agent_response(response, function_calls: FunctionCallSet | None):
     """

     if function_calls:
-        text = re.sub(r'^```json\n', '', response['text'])
+        text = re.sub(r'^.*```json\s*', '', response['text'], flags=re.DOTALL)
         values = list(json.loads(text.strip('` \n')).values())
         if len(values) == 1:
             return values[0]
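The widened regex matters when the model chats before the fenced JSON: the old pattern only stripped a fence at the very start of the response, while the new one (with re.DOTALL) discards any preamble up to and including the opening ```json fence. A quick illustration:

    import re

    text = 'Sure, here is the JSON you asked for:\n```json\n{"plan": ["step 1"]}\n```'

    re.sub(r'^```json\n', '', text)                      # no match: fence is not at the start
    re.sub(r'^.*```json\s*', '', text, flags=re.DOTALL)  # '{"plan": ["step 1"]}\n```'
    # The subsequent .strip('` \n') removes the trailing fence before json.loads().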
@@ -64,11 +72,12 @@ def parse_agent_response(response, function_calls: FunctionCallSet | None):
     return response['text']


-class LlamaInstructPrompter:
+class JsonPrompter:
     """
-    A prompter for Llama2 instruct models.
     Adapted from local_llm_function_calling
     """
+    def __init__(self, is_llama: bool = False):
+        self.is_llama = is_llama

     def function_descriptions(
         self, functions: list[FunctionType], function_to_call: str
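The rename reflects that the prompter is no longer Llama-specific; the Llama formatting now sits behind the new is_llama flag (sketch):

    prompter = JsonPrompter()      # is_llama=False: plain system/data/prompt output
    prompter = JsonPrompter(True)  # is_llama=True: [INST] <<SYS>> ... [/INST] wrapping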
@@ -177,7 +186,9 @@ class LlamaInstructPrompter:
             "Help choose the appropriate function to call to answer the user's question."
             if function_to_call is None
             else f"Define the arguments for {function_to_call} to answer the user's question."
-        ) + "In your response you must only use JSON output and provide no notes or commentary."
+        # ) + "\nYou must return a JSON object without notes or commentary."
+        ) + " \nIn your response you must only use JSON output and provide no explanation or commentary."
+
         data = (
             self.function_data(functions, function_to_call)
             if function_to_call
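For reference, the reworded instruction yields one of two system strings. A minimal re-creation of the updated expression (the helper name and function name are ours, not the repo's):

    def build_system(function_to_call=None):
        # Mirrors the updated expression in the hunk above.
        return (
            "Help choose the appropriate function to call to answer the user's question."
            if function_to_call is None
            else f"Define the arguments for {function_to_call} to answer the user's question."
        ) + " \nIn your response you must only use JSON output and provide no explanation or commentary."

    build_system()               # the model must first pick a function
    build_system('create_file')  # the model only fills in the arguments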
@@ -188,6 +199,8 @@ class LlamaInstructPrompter:
             if function_to_call
             else "Here's the function the user should call: "
         )
-        return f"[INST] <<SYS>>\n{system}\n\n{data}\n<</SYS>>\n\n{prompt} [/INST]"
-        # {response_start}"
+        if self.is_llama:
+            return f"[INST] <<SYS>>\n{system}\n\n{data}\n<</SYS>>\n\n{prompt} [/INST]"
+        else:
+            return f"{system}\n\n{data}\n\n{prompt}"
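The net effect of the branch: Llama-style models get the [INST]/<<SYS>> chat template, everything else gets a plain concatenation. With placeholder values (illustrative, not actual prompter output):

    system = 'Define the arguments for create_file ...'  # built as in the hunk above
    data = '{"name": "create_file", ...}'                # serialized function schema
    prompt = 'Create an empty README.'

    is_llama = False
    text = (f"[INST] <<SYS>>\n{system}\n\n{data}\n<</SYS>>\n\n{prompt} [/INST]"
            if is_llama
            else f"{system}\n\n{data}\n\n{prompt}")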