Mirror of https://github.com/OMGeeky/gpt-pilot.git, synced 2026-01-01 17:09:59 +01:00
Refactored AgentConvo.postprocess_response() into function_calling.process_json_response()
@@ -1,4 +1,5 @@
import json
import re
# from local_llm_function_calling import Generator
# from local_llm_function_calling.model.llama import LlamaModel
# from local_llm_function_calling.model.huggingface import HuggingfaceModel
@@ -40,6 +41,29 @@ def add_function_calls_to_request(gpt_data, function_calls: FunctionCallSet | No
    })


def parse_agent_response(response, function_calls: FunctionCallSet | None):
    """
    Post-processes the response from the agent.

    Args:
        response: The response from the agent.
        function_calls: Optional function calls associated with the response.

    Returns:
        The post-processed response.
    """

    if function_calls:
        text = re.sub(r'^```json\n', '', response['text'])
        values = list(json.loads(text.strip('` \n')).values())
        if len(values) == 1:
            return values[0]
        else:
            return tuple(values)

    return response['text']


class LlamaInstructPrompter:
    """
    A prompter for Llama2 instruct models.
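For context, here is a minimal usage sketch of the parse_agent_response() helper added above. It is not part of the diff: the import path utils.function_calling is assumed (the commit message only names the function_calling module), the response payloads are invented, and any truthy value stands in for a real FunctionCallSet, since the function only checks truthiness.

# Hedged usage sketch, not part of the diff. The import path is assumed.
from utils.function_calling import parse_agent_response

# A reply wrapped in a ```json fence, as LLMs often return it.
response = {'text': '```json\n{"plan": ["step 1", "step 2"]}\n```'}

# With function calls active (any truthy stand-in works here), the fence is
# stripped, the JSON is parsed, and a single top-level value is unwrapped.
plan = parse_agent_response(response, function_calls={'definitions': []})
assert plan == ['step 1', 'step 2']

# An object with several keys comes back as a tuple of its values.
response = {'text': '{"name": "app", "language": "python"}'}
name, language = parse_agent_response(response, function_calls={'definitions': []})
assert (name, language) == ('app', 'python')

# Without function calls, the raw text is returned unchanged.
assert parse_agent_response(response, function_calls=None) == response['text']

Stripping the leading ```json fence before calling json.loads() keeps callers agnostic to whether the model wrapped its reply in a Markdown code block.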