Implemented function calling for user stories

This commit is contained in:
Zvonimir Sabljic
2023-07-28 15:09:57 +02:00
parent 32e988db24
commit 81e4e92a08
5 changed files with 50 additions and 9 deletions

View File

@@ -0,0 +1,33 @@
from utils.llm import parse_llm_output
def process_user_stories(stories):
    """Callback for the LLM function call: hand back the parsed stories as-is."""
    return stories
def return_array_from_prompt(values_in_list):
    """Build an OpenAI function-calling definition asking the model to return
    a plain array of strings.

    Fix: the original body accepted ``values_in_list`` but never used it — both
    f-strings had no placeholders and hard-coded "user stories". The parameter
    is now interpolated, which yields byte-identical output for the existing
    call site (``return_array_from_prompt('user stories')``) while making the
    helper genuinely reusable for other item kinds.

    :param values_in_list: plural, human-readable name of the items the model
        should produce (e.g. ``'user stories'``).
    :return: dict in the OpenAI ``functions`` definition schema with a single
        required array-of-strings parameter named ``'stories'``.
    """
    return {
        # NOTE(review): name and parameter key stay hard-coded — USER_STORIES
        # and the dispatch in create_gpt_chat_completion look them up by these
        # exact strings, so parameterizing them here would break callers.
        'name': 'process_user_stories',
        'description': f"Print the list of {values_in_list} that are created.",
        'parameters': {
            'type': 'object',
            "properties": {
                "stories": {
                    "type": "array",
                    "description": f"List of {values_in_list} that are created in a list.",
                    "items": {
                        "type": "string",
                        # Singular item label stays fixed: deriving a singular
                        # form from `values_in_list` is not reliable.
                        "description": "User story"
                    },
                },
            },
            "required": ["stories"],
        },
    }
# Function-calling spec consumed by the LLM layer: 'definitions' is forwarded
# to the API as the `functions` payload, and 'functions' maps each declared
# function name back to the local Python callback that receives the model's
# parsed arguments.
USER_STORIES = {
    'definitions': [return_array_from_prompt('user stories')],
    'functions': {'process_user_stories': process_user_stories},
}

View File

@@ -158,7 +158,7 @@ def generate_messages_from_custom_conversation(role, messages, start_role='user'
return result
def execute_chat_prompt(prompt_file, prompt_data, chat_type, previous_messages=None):
def execute_chat_prompt(prompt_file, prompt_data, chat_type, previous_messages=None, function_calls=None):
# Generate a prompt for the completion type.
prompt = get_prompt(prompt_file, prompt_data)
new_message = {"role": "user", "content": prompt}
@@ -173,7 +173,7 @@ def execute_chat_prompt(prompt_file, prompt_data, chat_type, previous_messages=N
new_message,
]
response = create_gpt_chat_completion(messages, chat_type)
response = create_gpt_chat_completion(messages, chat_type, function_calls=function_calls)
messages.append({"role": "assistant", "content": response})

View File

@@ -13,4 +13,6 @@ A: {{ clarification.answer }}
Think step by step about the description for the app Euclid and the additional questions and answers and break down user stories. You will think about the app description and the answers listed and create a list of all user stories. A user story is a description of how a user can interact with the app. For example, if an app's description is `Create a script that finds Youtube channels with the word "test" inside the channel name`, user stories could be:
- `user will run the script from the CLI`
- `user will get the list of all channels in a CSV file`
Return the list of user stories in a JSON array.

View File

@@ -6,6 +6,7 @@ from utils.utils import execute_step, split_into_bullets, find_role_from_step, g
from database.database import save_progress, get_progress_steps
from logger.logger import logger
from prompts.prompts import get_additional_info_from_user, execute_chat_prompt
from const.function_calls import USER_STORIES
def get_user_stories(summary, args):
@@ -32,11 +33,12 @@ def get_user_stories(summary, args):
logger.info(f"Generating user stories...")
user_stories, user_stories_messages = execute_chat_prompt('user_stories/specs.prompt',
{'prompt': summary, 'app_type': args['app_type']},
current_step)
{'prompt': summary, 'app_type': args['app_type']},
current_step,
function_calls=USER_STORIES)
logger.info(split_into_bullets(user_stories))
user_stories = get_additional_info_from_user(split_into_bullets(user_stories), role)
logger.info(user_stories)
user_stories = get_additional_info_from_user(user_stories, role)
logger.info(f"Final user stories: {user_stories}")

View File

@@ -61,12 +61,12 @@ def create_gpt_chat_completion(messages: List[dict], req_type, min_tokens=MIN_TO
if function_calls is not None:
gpt_data['functions'] = function_calls['definitions']
gpt_data['function_call'] = "auto"
gpt_data['function_call'] = { 'name': function_calls['definitions'][0]['name'] }
try:
response = stream_gpt_completion(gpt_data, req_type)
if 'function_calls' in response and function_calls is not None:
function_calls['callback'](response['function_calls']);
return function_calls['functions'][response['function_calls']['name']](**response['function_calls']['arguments']);
elif 'text' in response:
return response['text']
except Exception as e:
@@ -133,6 +133,10 @@ def stream_gpt_completion(data, req_type):
if content:
gpt_response += content
if function_calls['arguments'] != '':
logger.info(f'Response via function call: {function_calls["arguments"]}')
function_calls['arguments'] = json.loads(function_calls['arguments'])
return { 'function_calls': function_calls };
logger.info(f'Response message: {gpt_response}')
new_code = postprocessing(gpt_response, req_type) # TODO add type dynamically
return { 'text': new_code }