Mirror of https://github.com/OMGeeky/gpt-pilot.git (synced 2026-01-06 11:19:33 +01:00)
tidy up
@@ -1,119 +1,119 @@
-# import re
-# import os
-# from unittest.mock import patch, Mock, MagicMock
-# from dotenv import load_dotenv
-# load_dotenv()
-#
-# from .CodeMonkey import CodeMonkey
-# from .Developer import Developer
-# from database.models.files import File
-# from database.models.development_steps import DevelopmentSteps
-# from helpers.Project import Project, update_file, clear_directory
-# from helpers.AgentConvo import AgentConvo
-# from test.test_utils import mock_terminal_size
-#
-# SEND_TO_LLM = False
-# WRITE_TO_FILE = False
-#
-#
-# class TestCodeMonkey:
-#     def setup_method(self):
-#         name = 'TestDeveloper'
-#         self.project = Project({
-#                 'app_id': 'test-developer',
-#                 'name': name,
-#                 'app_type': ''
-#             },
-#             name=name,
-#             architecture=[],
-#             user_stories=[],
-#             current_step='coding',
-#         )
-#
-#         self.project.root_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
-#                                                                '../../../workspace/TestDeveloper'))
-#         self.project.technologies = []
-#         last_step = DevelopmentSteps()
-#         last_step.id = 1
-#         self.project.checkpoints = {'last_development_step': last_step}
-#         self.project.app = None
-#         self.developer = Developer(self.project)
-#         self.codeMonkey = CodeMonkey(self.project, developer=self.developer)
-#
-#     @patch('helpers.AgentConvo.get_saved_development_step', return_value=None)
-#     @patch('helpers.AgentConvo.save_development_step', return_value=None)
-#     @patch('os.get_terminal_size', mock_terminal_size)
-#     @patch.object(File, 'insert')
-#     def test_implement_code_changes(self, mock_get_dev, mock_save_dev, mock_file_insert):
-#         # Given
-#         code_changes_description = "Write the word 'Washington' to a .txt file"
-#
-#         if SEND_TO_LLM:
-#             convo = AgentConvo(self.codeMonkey)
-#         else:
-#             convo = MagicMock()
-#             mock_responses = [
-#                 # [],
-#                 [{
-#                     'content': 'Washington',
-#                     'description': "A new .txt file with the word 'Washington' in it.",
-#                     'name': 'washington.txt',
-#                     'path': 'washington.txt'
-#                 }]
-#             ]
-#             convo.send_message.side_effect = mock_responses
-#
-#         if WRITE_TO_FILE:
-#             self.codeMonkey.implement_code_changes(convo, code_changes_description)
-#         else:
-#             # don't write the file, just
-#             with patch.object(Project, 'save_file') as mock_save_file:
-#                 # When
-#                 self.codeMonkey.implement_code_changes(convo, code_changes_description)
-#
-#                 # Then
-#                 mock_save_file.assert_called_once()
-#                 called_data = mock_save_file.call_args[0][0]
-#                 assert re.match(r'\w+\.txt$', called_data['name'])
-#                 assert (called_data['path'] == '/' or called_data['path'] == called_data['name'])
-#                 assert called_data['content'] == 'Washington'
-#
-#     @patch('helpers.AgentConvo.get_saved_development_step', return_value=None)
-#     @patch('helpers.AgentConvo.save_development_step', return_value=None)
-#     @patch('os.get_terminal_size', mock_terminal_size)
-#     @patch.object(File, 'insert')
-#     def test_implement_code_changes_with_read(self, mock_get_dev, mock_save_dev, mock_file_insert):
-#         # Given
-#         code_changes_description = "Read the file called file_to_read.txt and write its content to a file called output.txt"
-#         workspace = self.project.root_path
-#         update_file(os.path.join(workspace, 'file_to_read.txt'), 'Hello World!\n')
-#
-#         if SEND_TO_LLM:
-#             convo = AgentConvo(self.codeMonkey)
-#         else:
-#             convo = MagicMock()
-#             mock_responses = [
-#                 # ['file_to_read.txt', 'output.txt'],
-#                 [{
-#                     'content': 'Hello World!\n',
-#                     'description': 'This file is the output file. The content of file_to_read.txt is copied into this file.',
-#                     'name': 'output.txt',
-#                     'path': 'output.txt'
-#                 }]
-#             ]
-#             convo.send_message.side_effect = mock_responses
-#
-#         if WRITE_TO_FILE:
-#             self.codeMonkey.implement_code_changes(convo, code_changes_description)
-#         else:
-#             with patch.object(Project, 'save_file') as mock_save_file:
-#                 # When
-#                 self.codeMonkey.implement_code_changes(convo, code_changes_description)
-#
-#                 # Then
-#                 clear_directory(workspace)
-#                 mock_save_file.assert_called_once()
-#                 called_data = mock_save_file.call_args[0][0]
-#                 assert called_data['name'] == 'output.txt'
-#                 assert (called_data['path'] == '/' or called_data['path'] == called_data['name'])
-#                 assert called_data['content'] == 'Hello World!\n'
+import re
+import os
+from unittest.mock import patch, Mock, MagicMock
+from dotenv import load_dotenv
+load_dotenv()
+
+from .CodeMonkey import CodeMonkey
+from .Developer import Developer
+from database.models.files import File
+from database.models.development_steps import DevelopmentSteps
+from helpers.Project import Project, update_file, clear_directory
+from helpers.AgentConvo import AgentConvo
+from test.test_utils import mock_terminal_size
+
+SEND_TO_LLM = False
+WRITE_TO_FILE = False
+
+
+class TestCodeMonkey:
+    def setup_method(self):
+        name = 'TestDeveloper'
+        self.project = Project({
+                'app_id': 'test-developer',
+                'name': name,
+                'app_type': ''
+            },
+            name=name,
+            architecture=[],
+            user_stories=[],
+            current_step='coding',
+        )
+
+        self.project.root_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                                                               '../../../workspace/TestDeveloper'))
+        self.project.technologies = []
+        last_step = DevelopmentSteps()
+        last_step.id = 1
+        self.project.checkpoints = {'last_development_step': last_step}
+        self.project.app = None
+        self.developer = Developer(self.project)
+        self.codeMonkey = CodeMonkey(self.project, developer=self.developer)
+
+    @patch('helpers.AgentConvo.get_saved_development_step', return_value=None)
+    @patch('helpers.AgentConvo.save_development_step', return_value=None)
+    @patch('os.get_terminal_size', mock_terminal_size)
+    @patch.object(File, 'insert')
+    def test_implement_code_changes(self, mock_get_dev, mock_save_dev, mock_file_insert):
+        # Given
+        code_changes_description = "Write the word 'Washington' to a .txt file"
+
+        if SEND_TO_LLM:
+            convo = AgentConvo(self.codeMonkey)
+        else:
+            convo = MagicMock()
+            mock_responses = [
+                # [],
+                [{
+                    'content': 'Washington',
+                    'description': "A new .txt file with the word 'Washington' in it.",
+                    'name': 'washington.txt',
+                    'path': 'washington.txt'
+                }]
+            ]
+            convo.send_message.side_effect = mock_responses
+
+        if WRITE_TO_FILE:
+            self.codeMonkey.implement_code_changes(convo, code_changes_description)
+        else:
+            # don't write the file, just
+            with patch.object(Project, 'save_file') as mock_save_file:
+                # When
+                self.codeMonkey.implement_code_changes(convo, code_changes_description)
+
+                # Then
+                mock_save_file.assert_called_once()
+                called_data = mock_save_file.call_args[0][0]
+                assert re.match(r'\w+\.txt$', called_data['name'])
+                assert (called_data['path'] == '/' or called_data['path'] == called_data['name'])
+                assert called_data['content'] == 'Washington'
+
+    # @patch('helpers.AgentConvo.get_saved_development_step', return_value=None)
+    # @patch('helpers.AgentConvo.save_development_step', return_value=None)
+    # @patch('os.get_terminal_size', mock_terminal_size)
+    # @patch.object(File, 'insert')
+    # def test_implement_code_changes_with_read(self, mock_get_dev, mock_save_dev, mock_file_insert):
+    #     # Given
+    #     code_changes_description = "Read the file called file_to_read.txt and write its content to a file called output.txt"
+    #     workspace = self.project.root_path
+    #     update_file(os.path.join(workspace, 'file_to_read.txt'), 'Hello World!\n')
+    #
+    #     if SEND_TO_LLM:
+    #         convo = AgentConvo(self.codeMonkey)
+    #     else:
+    #         convo = MagicMock()
+    #         mock_responses = [
+    #             # ['file_to_read.txt', 'output.txt'],
+    #             [{
+    #                 'content': 'Hello World!\n',
+    #                 'description': 'This file is the output file. The content of file_to_read.txt is copied into this file.',
+    #                 'name': 'output.txt',
+    #                 'path': 'output.txt'
+    #             }]
+    #         ]
+    #         convo.send_message.side_effect = mock_responses
+    #
+    #     if WRITE_TO_FILE:
+    #         self.codeMonkey.implement_code_changes(convo, code_changes_description)
+    #     else:
+    #         with patch.object(Project, 'save_file') as mock_save_file:
+    #             # When
+    #             self.codeMonkey.implement_code_changes(convo, code_changes_description)
+    #
+    #             # Then
+    #             clear_directory(workspace)
+    #             mock_save_file.assert_called_once()
+    #             called_data = mock_save_file.call_args[0][0]
+    #             assert called_data['name'] == 'output.txt'
+    #             assert (called_data['path'] == '/' or called_data['path'] == called_data['name'])
+    #             assert called_data['content'] == 'Hello World!\n'
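The test above patches os.get_terminal_size with mock_terminal_size from test.test_utils, which is not included in this diff. A minimal sketch of what such a helper could look like (an assumption for illustration, not the repository's actual implementation):

    import os


    def mock_terminal_size(fd=None):
        # Hypothetical stand-in for os.get_terminal_size: return a fixed,
        # deterministic terminal geometry so output-width calculations are stable in tests.
        return os.terminal_size((80, 24))

Because @patch('os.get_terminal_size', mock_terminal_size) supplies an explicit replacement object, unittest.mock swaps the attribute for this callable directly (and does not pass an extra mock argument to the test), so any code that asks for the terminal size during the test sees the fixed 80x24 geometry.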
@@ -105,7 +105,7 @@ def create_gpt_chat_completion(messages: List[dict], req_type, project,
     except TokenLimitError as e:
         raise e
     except Exception as e:
-        # logger.error(f'The request to {os.getenv("ENDPOINT")} API failed: %s', e)
+        logger.error(f'The request to {os.getenv("ENDPOINT")} API failed: %s', e)
         print(f'The request to {os.getenv("ENDPOINT")} API failed. Here is the error message:')
         print(e)
         return {} # https://github.com/Pythagora-io/gpt-pilot/issues/130 - may need to revisit how we handle this
@@ -154,7 +154,6 @@ def retry_on_exception(func):
         except Exception as e:
             # Convert exception to string
             err_str = str(e)
-            logger.info(f'##### 6, err_str: {err_str}')

             # If the specific error "context_length_exceeded" is present, simply return without retry
             if isinstance(e, json.JSONDecodeError):
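The hunk above removes a debug log line from retry_on_exception, whose full body is not shown in this diff. For context only, a decorator of this general shape might look like the following sketch (a hypothetical outline, not the project's exact code):

    import json
    from functools import wraps


    def retry_on_exception(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            retries = 0
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    # Convert exception to string
                    err_str = str(e)
                    # Retry a few times when the model returned malformed JSON;
                    # re-raise anything else (or after too many attempts).
                    if isinstance(e, json.JSONDecodeError) and retries < 3:
                        retries += 1
                        print(f'Retrying after error: {err_str}')
                        continue
                    raise
        return wrapper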
@@ -290,8 +289,6 @@ def stream_gpt_completion(data, req_type, project):
         'Authorization': 'Bearer ' + get_api_key_or_throw('OPENAI_API_KEY')
     }

-    logger.info(f'##### 0.1, endpoint_url: {endpoint_url}, headers: {headers}')
-
     response = requests.post(
         endpoint_url,
         headers=headers,
@@ -300,7 +297,7 @@ def stream_gpt_completion(data, req_type, project):
     )

     # Log the response status code and message
-    logger.info(f'Response status code: {response.status_code}')
+    logger.debug(f'Response status code: {response.status_code}')

     if response.status_code != 200:
         logger.info(f'problem with request: {response.text}')
@@ -309,11 +306,9 @@ def stream_gpt_completion(data, req_type, project):
     # function_calls = {'name': '', 'arguments': ''}

     for line in response.iter_lines():
-        logger.info(f'##### 0, line: {line}')
         # Ignore keep-alive new lines
         if line and line != b': OPENROUTER PROCESSING':
             line = line.decode("utf-8") # decode the bytes to string
-            logger.info(f'##### 1, line: {line}')

             if line.startswith('data: '):
                 line = line[6:] # remove the 'data: ' prefix
@@ -357,8 +352,6 @@ def stream_gpt_completion(data, req_type, project):
                 if 'content' in json_line:
                     content = json_line.get('content')
                     if content:
-                        logger.info(f'##### 2, content: {content}')
-                        logger.info(f'##### 3, buffer: {buffer}')
                         buffer += content # accumulate the data

                         # If you detect a natural breakpoint (e.g., line break or end of a response object), print & count:
@@ -370,7 +363,6 @@ def stream_gpt_completion(data, req_type, project):
                             lines_printed += count_lines_based_on_width(buffer, terminal_width)
                             buffer = "" # reset the buffer

-                        logger.info(f'##### 4, gpt_response: {gpt_response}')
                         gpt_response += content
                         print(content, type='stream', end='', flush=True)

@@ -382,7 +374,6 @@ def stream_gpt_completion(data, req_type, project):
     # return return_result({'function_calls': function_calls}, lines_printed)
     logger.info(f'< Response message: {gpt_response}')

-    logger.info(f'##### 5, expecting_json: {expecting_json}')
     if expecting_json:
         gpt_response = clean_json_response(gpt_response)
         assert_json_schema(gpt_response, expecting_json)
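The final hunk calls clean_json_response before validating the response against a JSON schema; that helper is not part of this diff. One plausible shape for it, purely as an assumption for illustration, is a function that strips a surrounding Markdown code fence so the remaining text can be parsed as plain JSON:

    def clean_json_response(gpt_response: str) -> str:
        # Hypothetical sketch: remove a leading ```/```json fence line and a
        # trailing ``` fence, if present, and return the bare JSON text.
        text = gpt_response.strip()
        if text.startswith('```'):
            text = text.split('\n', 1)[1] if '\n' in text else ''
            if text.rstrip().endswith('```'):
                text = text.rstrip()[:-3]
        return text.strip()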