Mirror of https://github.com/OMGeeky/gpt-pilot.git (synced 2026-02-23 15:49:50 +01:00).
ARCHITECTURE function_calls works on meta-llama/codellama-34b-instruct
This commit is contained in:
@@ -1,11 +1,10 @@
|
||||
import json
|
||||
import re
|
||||
import subprocess
|
||||
import uuid
|
||||
from utils.style import yellow, yellow_bold
|
||||
|
||||
from database.database import get_saved_development_step, save_development_step, delete_all_subsequent_steps
|
||||
from helpers.files import get_files_content
|
||||
from const.common import IGNORE_FOLDERS
|
||||
from helpers.exceptions.TokenLimitError import TokenLimitError
|
||||
from utils.utils import array_of_objects_to_string, get_prompt
|
||||
from utils.llm_connection import create_gpt_chat_completion
|
||||
@@ -188,10 +187,17 @@ class AgentConvo:
|
||||
"""
|
||||
if 'function_calls' in response and function_calls is not None:
|
||||
if 'send_convo' in function_calls:
|
||||
response['function_calls']['arguments']['convo'] = self
|
||||
response['function_calls']['arguments']['convo'] = self
|
||||
response = function_calls['functions'][response['function_calls']['name']](**response['function_calls']['arguments'])
|
||||
elif 'text' in response:
|
||||
response = response['text']
|
||||
if function_calls:
|
||||
values = list(json.loads(response['text']).values())
|
||||
if len(values) == 1:
|
||||
return values[0]
|
||||
else:
|
||||
return tuple(values)
|
||||
else:
|
||||
response = response['text']
|
||||
|
||||
return response
|
||||
|
||||
|
||||
Reference in New Issue
Block a user