mirror of https://github.com/OMGeeky/gpt-pilot.git (synced 2026-01-24 19:00:27 +01:00)
Tested Developer.install_technology before refactoring without function_calling
.github/workflows/ci.yml (vendored): 2 lines changed
@@ -43,4 +43,4 @@ jobs:
        run: |
          pip install pytest
          cd pilot
-         PYTHONPATH=. pytest -m "not slow"
+         PYTHONPATH=. pytest -m "not slow and not uses_tokens"
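With this change the CI job deselects tests marked `uses_tokens` as well as `slow`. A minimal sketch of how a test opts into that marker (a hypothetical test module, not part of this commit):

```python
import pytest

@pytest.mark.uses_tokens          # deselected in CI by -m "not slow and not uses_tokens"
def test_against_a_real_llm():
    ...                           # would spend real API tokens

def test_plain_logic():           # unmarked tests still run in CI
    assert 1 + 1 == 2
```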
pilot/helpers/agents/test_Developer.py (new file): 61 lines
@@ -0,0 +1,61 @@
import builtins
import os
from unittest.mock import patch, Mock

from helpers.AgentConvo import AgentConvo
from dotenv import load_dotenv
load_dotenv()

from main import get_custom_print
from .Developer import Developer, ENVIRONMENT_SETUP_STEP
from helpers.Project import Project


def mock_terminal_size():
    mock_size = Mock()
    mock_size.columns = 80  # or whatever width you want
    return mock_size


class TestDeveloper:
    def setup_method(self):
        builtins.print, ipc_client_instance = get_custom_print({})

        name = 'TestDeveloper'
        self.project = Project({
                'app_id': 'test-developer',
                'name': name,
                'app_type': ''
            },
            name=name,
            architecture=[],
            user_stories=[]
        )

        self.project.root_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                                               '../../../workspace/TestDeveloper'))
        self.project.technologies = []
        self.project.current_step = ENVIRONMENT_SETUP_STEP
        self.developer = Developer(self.project)

    # @pytest.mark.uses_tokens
    @patch('helpers.AgentConvo.get_saved_development_step')
    @patch('helpers.AgentConvo.save_development_step')
    @patch('helpers.AgentConvo.create_gpt_chat_completion',
           return_value={'function_calls': {
               'name': 'execute_command',
               'arguments': {'command': 'python --version', 'timeout': 10}
           }})
    @patch('helpers.cli.styled_text', return_value='no')
    @patch('helpers.cli.execute_command', return_value=('', 'DONE'))
    def test_install_technology(self, mock_execute_command, mock_styled_text,
                                mock_completion, mock_save, mock_get_saved_step):
        # Given
        self.developer.convo_os_specific_tech = AgentConvo(self.developer)

        # When
        llm_response = self.developer.install_technology('python')

        # Then
        assert llm_response == 'DONE'
        mock_execute_command.assert_called_once_with(self.project, 'python --version', 10)
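One detail worth noting in test_install_technology: the mock parameters are listed in the reverse order of the @patch decorators, because the decorator closest to the function is applied first and therefore supplies the first argument. A standalone sketch of that rule, using standard-library targets rather than gpt-pilot's own modules:

```python
from unittest.mock import patch

@patch('os.path.exists', return_value=True)   # outermost patch -> last parameter
@patch('os.getcwd', return_value='/tmp')      # innermost patch -> first parameter
def check(mock_getcwd, mock_exists):
    assert mock_getcwd() == '/tmp'
    assert mock_exists('anything') is True

check()
```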
pilot/test/mock_questionary.py (new file): 25 lines
@@ -0,0 +1,25 @@
class MockQuestionary:
    def __init__(self, answers=None):
        if answers is None:
            answers = []
        self.answers = iter(answers)
        self.state = 'project_description'

    def text(self, question: str, style=None):
        print('AI: ' + question)
        if question.startswith('User Story'):
            self.state = 'user_stories'
        elif question.endswith('write "DONE"'):
            self.state = 'DONE'
        return self

    def unsafe_ask(self):
        if self.state == 'user_stories':
            answer = ''
        elif self.state == 'DONE':
            answer = 'DONE'
        else:  # if self.state == 'project_description':
            answer = next(self.answers, '')

        print('User:', answer)
        return answer
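MockQuestionary mirrors just the slice of the questionary API the tests rely on (`text(...)` followed by `unsafe_ask()`), so it can be swapped in wherever the code would normally prompt the user. A hypothetical usage sketch; the patch target below is an assumption for illustration, not something this commit establishes:

```python
from unittest.mock import patch
from test.mock_questionary import MockQuestionary  # import added by this commit (pilot/ on PYTHONPATH)

def test_with_canned_answers():
    mock_questionary = MockQuestionary(['A simple web-based chat app'])
    # 'utils.questionary.questionary' is an assumed patch target, purely illustrative.
    with patch('utils.questionary.questionary', mock_questionary):
        ...  # drive the code under test; prompts are answered from the canned list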
@@ -6,6 +6,7 @@ load_dotenv()
from database.database import create_tables
from helpers.Project import Project
+from test.mock_questionary import MockQuestionary
from .main import init, get_custom_print


@@ -21,32 +22,8 @@ def test_init():
        assert args[field] is None


-class MockQuestionary():
-    def __init__(self, answers=[]):
-        self.answers = iter(answers)
-        self.state = 'project_description'
-
-    def text(self, question: str, style=None):
-        print('AI: ' + question)
-        if question.startswith('User Story'):
-            self.state = 'user_stories'
-        elif question.endswith('write "DONE"'):
-            self.state = 'DONE'
-        return self
-
-    def unsafe_ask(self):
-        if self.state == 'user_stories':
-            answer = ''
-        elif self.state == 'DONE':
-            answer = 'DONE'
-        else:  # if self.state == 'project_description':
-            answer = next(self.answers, '')
-
-        print('User:', answer)
-        return answer
-

@pytest.mark.slow
@pytest.mark.uses_tokens
@pytest.mark.skip(reason="Uses lots of tokens")
def test_end_to_end():
    # Given
pilot/utils/test_function_calling.py (new file): 112 lines
@@ -0,0 +1,112 @@
from local_llm_function_calling.prompter import CompletionModelPrompter, InstructModelPrompter

from const.function_calls import ARCHITECTURE, DEV_STEPS
from .function_calling import JsonPrompter


def test_completion_function_prompt():
    # Given
    prompter = CompletionModelPrompter()

    # When
    prompt = prompter.prompt('Create a web-based chat app', ARCHITECTURE['definitions'])  # , 'process_technologies')

    # Then
    assert prompt == '''Create a web-based chat app

Available functions:
process_technologies - Print the list of technologies that are created.
```jsonschema
{
    "technologies": {
        "type": "array",
        "description": "List of technologies that are created in a list.",
        "items": {
            "type": "string",
            "description": "technology"
        }
    }
}
```

Function call:

Function call: '''


def test_instruct_function_prompter():
    # Given
    prompter = InstructModelPrompter()

    # When
    prompt = prompter.prompt('Create a web-based chat app', ARCHITECTURE['definitions'])  # , 'process_technologies')

    # Then
    assert prompt == '''Your task is to call a function when needed. You will be provided with a list of functions. Available functions:
process_technologies - Print the list of technologies that are created.
```jsonschema
{
    "technologies": {
        "type": "array",
        "description": "List of technologies that are created in a list.",
        "items": {
            "type": "string",
            "description": "technology"
        }
    }
}
```

Create a web-based chat app

Function call: '''


def test_json_prompter():
    # Given
    prompter = JsonPrompter()

    # When
    prompt = prompter.prompt('Create a web-based chat app', ARCHITECTURE['definitions'])  # , 'process_technologies')

    # Then
    assert prompt == '''[INST] <<SYS>>
Help choose the appropriate function to call to answer the user's question.
In your response you must only use JSON output and provide no notes or commentary.

Available functions:
- process_technologies - Print the list of technologies that are created.
<</SYS>>

Create a web-based chat app [/INST]'''


def test_llama_instruct_function_prompter_named():
    # Given
    prompter = LlamaInstructPrompter()

    # When
    prompt = prompter.prompt('Create a web-based chat app', ARCHITECTURE['definitions'], 'process_technologies')

    # Then
    assert prompt == '''[INST] <<SYS>>
Define the arguments for process_technologies to answer the user's question.
In your response you must only use JSON output and provide no notes or commentary.

Function description: Print the list of technologies that are created.
Function parameters should follow this schema:
```jsonschema
{
    "technologies": {
        "type": "array",
        "description": "List of technologies that are created in a list.",
        "items": {
            "type": "string",
            "description": "technology"
        }
    }
}
```
<</SYS>>

Create a web-based chat app [/INST]'''
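Note that the last test instantiates LlamaInstructPrompter, which does not appear in this file's imports, so it would fail with a NameError as committed. For orientation, a hedged sketch of how JsonPrompter is driven by these tests; the stand-in function definition below only mirrors the shape the expected prompts imply and is not copied from const/function_calls.py, and the import path assumes pilot/ is on PYTHONPATH as in the CI step above:

```python
# Assumed import path when running from the repo with PYTHONPATH=pilot
from utils.function_calling import JsonPrompter

# Hypothetical stand-in definition; field names mirror what the expected prompts imply.
definitions = [{
    'name': 'process_technologies',
    'description': 'Print the list of technologies that are created.',
    'parameters': {
        'type': 'object',
        'properties': {
            'technologies': {
                'type': 'array',
                'description': 'List of technologies that are created in a list.',
                'items': {'type': 'string', 'description': 'technology'},
            },
        },
    },
}]

prompter = JsonPrompter()
print(prompter.prompt('Create a web-based chat app', definitions))                          # "help choose a function" form
print(prompter.prompt('Create a web-based chat app', definitions, 'process_technologies'))  # single named-function form
```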
@@ -2,7 +2,7 @@ import builtins
import os
from dotenv import load_dotenv
from unittest.mock import patch
from local_llm_function_calling.prompter import CompletionModelPrompter, InstructModelPrompter


from const.function_calls import ARCHITECTURE, DEV_STEPS
from helpers.AgentConvo import AgentConvo
@@ -97,61 +97,7 @@ class TestLlmConnection:
        # response = response['function_calls']['arguments']['technologies']
        assert 'Node.js' in response

-    def test_completion_function_prompt(self):
-        # Given
-        prompter = CompletionModelPrompter()
-
-        # When
-        prompt = prompter.prompt('Create a web-based chat app', ARCHITECTURE['definitions'])  # , 'process_technologies')
-
-        # Then
-        assert prompt == '''Create a web-based chat app
-
-Available functions:
-process_technologies - Print the list of technologies that are created.
-```jsonschema
-{
-    "technologies": {
-        "type": "array",
-        "description": "List of technologies that are created in a list.",
-        "items": {
-            "type": "string",
-            "description": "technology"
-        }
-    }
-}
-```
-
-Function call:
-
-Function call: '''
-
-    def test_instruct_function_prompter(self):
-        # Given
-        prompter = InstructModelPrompter()
-
-        # When
-        prompt = prompter.prompt('Create a web-based chat app', ARCHITECTURE['definitions'])  # , 'process_technologies')
-
-        # Then
-        assert prompt == '''Your task is to call a function when needed. You will be provided with a list of functions. Available functions:
-process_technologies - Print the list of technologies that are created.
-```jsonschema
-{
-    "technologies": {
-        "type": "array",
-        "description": "List of technologies that are created in a list.",
-        "items": {
-            "type": "string",
-            "description": "technology"
-        }
-    }
-}
-```
-
-Create a web-based chat app
-
-Function call: '''
-
    def _create_convo(self, agent):
        convo = AgentConvo(agent)
@@ -4,4 +4,5 @@ python_files = test_*.py
markers =
    slow: marks tests as slow (deselect with '-m "not slow"')
    uses_tokens: Integration tests which use tokens
    daily: tests which should be run daily
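These registered markers drive the selection expressions used in CI. A small sketch (not part of the commit) of invoking the same selections programmatically:

```python
import pytest

# What CI runs after this change: skip slow and token-consuming tests.
pytest.main(['-m', 'not slow and not uses_tokens'])

# A scheduled job could run only the daily-marked tests.
pytest.main(['-m', 'daily'])
```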