From fec1ce48b6072d591c5fe9f90ae125a46e028124 Mon Sep 17 00:00:00 2001
From: Zvonimir Sabljic
Date: Thu, 3 Aug 2023 16:18:18 +0200
Subject: [PATCH] Changed so development steps are hashed by the prompt path,
 prompt data and an llm request number + added functionality for resuming
 from a specific development step

---
 euclid/database/database.py                 | 25 ++++++++++++++++++-----
 euclid/database/models/development_steps.py |  5 ++++-
 euclid/helpers/AgentConvo.py                | 12 ++++++----
 euclid/helpers/Project.py                   |  3 +++
 4 files changed, 35 insertions(+), 10 deletions(-)

diff --git a/euclid/database/database.py b/euclid/database/database.py
index 6b2893b..e92f142 100644
--- a/euclid/database/database.py
+++ b/euclid/database/database.py
@@ -107,19 +107,34 @@ def get_progress_steps(app_id, step=None):
 
     return steps
 
-def save_development_step(app_id, messages, response):
+def save_development_step(app_id, prompt_path, prompt_data, llm_req_num, messages, response):
     app = get_app(app_id)
-    hash_id = hash_data(messages)
+    hash_id = hash_data({
+        'prompt_path': prompt_path,
+        'prompt_data': prompt_data,
+        'llm_req_num': llm_req_num
+    })
     try:
-        dev_step = DevelopmentSteps.create(app=app, hash_id=hash_id, messages=messages, llm_response=response)
+        inserted_id = (DevelopmentSteps
+                       .insert(app=app, hash_id=hash_id, messages=messages, llm_response=response)
+                       .on_conflict(conflict_target=[DevelopmentSteps.app, DevelopmentSteps.hash_id],
+                                    preserve=[DevelopmentSteps.messages, DevelopmentSteps.llm_response],
+                                    update={})
+                       .execute())
+
+        dev_step = DevelopmentSteps.get_by_id(inserted_id)
     except IntegrityError:
         print(f"A Development Step with hash_id {hash_id} already exists.")
         return None
     return dev_step
 
 
-def get_development_step_from_messages(app_id, messages):
-    hash_id = hash_data(messages)
+def get_development_step_from_hash_id(app_id, prompt_path, prompt_data, llm_req_num):
+    hash_id = hash_data({
+        'prompt_path': prompt_path,
+        'prompt_data': prompt_data,
+        'llm_req_num': llm_req_num
+    })
     try:
         dev_step = DevelopmentSteps.get((DevelopmentSteps.hash_id == hash_id) & (DevelopmentSteps.app == app_id))
     except DoesNotExist:
diff --git a/euclid/database/models/development_steps.py b/euclid/database/models/development_steps.py
index 47cba9f..f3d2800 100644
--- a/euclid/database/models/development_steps.py
+++ b/euclid/database/models/development_steps.py
@@ -14,4 +14,7 @@ class DevelopmentSteps(BaseModel):
     llm_response = BinaryJSONField(null=False)
 
     class Meta:
-        db_table = 'development_steps'
\ No newline at end of file
+        db_table = 'development_steps'
+        indexes = (
+            (('app', 'hash_id'), True),
+        )
\ No newline at end of file
diff --git a/euclid/helpers/AgentConvo.py b/euclid/helpers/AgentConvo.py
index 1f9e5da..e9c364a 100644
--- a/euclid/helpers/AgentConvo.py
+++ b/euclid/helpers/AgentConvo.py
@@ -1,5 +1,5 @@
 import subprocess
-from database.database import get_development_step_from_messages, save_development_step
+from database.database import get_development_step_from_hash_id, save_development_step
 from utils.utils import array_of_objects_to_string
 from utils.llm_connection import get_prompt, create_gpt_chat_completion
 from utils.utils import get_sys_message, find_role_from_step, capitalize_first_word_with_underscores
@@ -24,16 +24,20 @@ class AgentConvo:
 
         # check if we already have the LLM response saved
-        development_step = get_development_step_from_messages(self.agent.project.args['app_id'], self.messages)
-        if development_step is not None:
+        self.agent.project.llm_req_num += 1
+        development_step = get_development_step_from_hash_id(self.agent.project.args['app_id'], prompt_path, prompt_data, self.agent.project.llm_req_num)
+        if development_step is not None and self.agent.project.skip_steps:
             # if we do, use it
+            if self.agent.project.skip_until_dev_step and str(development_step.id) == self.agent.project.skip_until_dev_step:
+                self.agent.project.skip_steps = False
+
             print(colored(f'Restoring development step with id {development_step.id}', 'yellow'))
             self.agent.project.restore_files(development_step.id)
             response = development_step.llm_response
             self.messages = development_step.messages
         else:
             # if we don't, get the response from LLM
             response = create_gpt_chat_completion(self.messages, self.high_level_step, function_calls=function_calls)
-            development_step = save_development_step(self.agent.project.args['app_id'], self.messages, response)
+            development_step = save_development_step(self.agent.project.args['app_id'], prompt_path, prompt_data, self.agent.project.llm_req_num, self.messages, response)
             self.agent.project.save_files_snapshot(development_step.id)
 
         # TODO handle errors from OpenAI
diff --git a/euclid/helpers/Project.py b/euclid/helpers/Project.py
index d96b1f1..3b3d68a 100644
--- a/euclid/helpers/Project.py
+++ b/euclid/helpers/Project.py
@@ -16,6 +16,9 @@ from database.models.file_snapshot import FileSnapshot
 class Project:
     def __init__(self, args, name=None, description=None, user_stories=None, user_tasks=None, architecture=None, development_plan=None, current_step=None):
         self.args = args
+        self.llm_req_num = 0
+        self.skip_steps = True
+        self.skip_until_dev_step = args['skip_until_dev_step'] if 'skip_until_dev_step' in args else None
 
         # TODO make flexible
         self.root_path = ''
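
Notes on the change (illustrative sketches, not part of the diff above):

1) Cache keys. A development step is now cached under the identity of the
LLM request that produced it -- the prompt template path, the data rendered
into it, and a per-run request counter -- instead of under the full message
history. The patch does not show hash_data itself; a minimal compatible
sketch, assuming it digests a deterministic JSON serialization (the example
values below are hypothetical), could look like:

    import hashlib
    import json

    def hash_data(data):
        # Hypothetical stand-in for euclid's real hash_data helper, which
        # is not shown in the patch and may differ.
        serialized = json.dumps(data, sort_keys=True, default=str).encode('utf-8')
        return hashlib.sha256(serialized).hexdigest()

    hash_id = hash_data({
        'prompt_path': 'development/implement_task.prompt',  # hypothetical
        'prompt_data': {'task': 'add a login form'},         # hypothetical
        'llm_req_num': 3,
    })

Because llm_req_num increments on every request, the key is positional: the
third request of a re-run only hits the cache if it uses the same prompt as
the third request of the original run.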
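
2) Upsert semantics. save_development_step switches from
DevelopmentSteps.create() to an explicit INSERT ... ON CONFLICT so that
re-running a step overwrites the cached messages and llm_response instead of
failing on the duplicate key (the IntegrityError branch remains as a safety
net). This depends on the new entry in Meta.indexes: the trailing True marks
('app', 'hash_id') as a unique composite index, which is the constraint that
conflict_target resolves against. The empty update={} in the patch appears
to be a no-op alongside preserve. A self-contained sketch of the same
pattern, simplified to SQLite and TextField so it runs standalone (the real
model uses Postgres and BinaryJSONField):

    from peewee import SqliteDatabase, Model, CharField, TextField

    db = SqliteDatabase(':memory:')  # the project itself targets Postgres

    class DevelopmentSteps(Model):
        app = CharField()
        hash_id = CharField()
        messages = TextField()
        llm_response = TextField()

        class Meta:
            database = db
            table_name = 'development_steps'  # peewee 3 spelling of db_table
            indexes = (
                # (columns, unique): True makes (app, hash_id) a unique
                # composite index
                (('app', 'hash_id'), True),
            )

    db.create_tables([DevelopmentSteps])

    def upsert_step(app, hash_id, messages, response):
        # preserve=[...] keeps the INCOMING (EXCLUDED) values on conflict,
        # i.e. the cached step is overwritten with the fresh data.
        (DevelopmentSteps
         .insert(app=app, hash_id=hash_id, messages=messages, llm_response=response)
         .on_conflict(conflict_target=[DevelopmentSteps.app, DevelopmentSteps.hash_id],
                      preserve=[DevelopmentSteps.messages, DevelopmentSteps.llm_response])
         .execute())
        # Fetch by the natural key. The patch instead uses the id returned
        # by execute(), which is reliable on Postgres, where the insert runs
        # with RETURNING and covers the update path too.
        return DevelopmentSteps.get((DevelopmentSteps.app == app) &
                                    (DevelopmentSteps.hash_id == hash_id))

    a = upsert_step('app-1', 'abc123', '[]', 'v1')
    b = upsert_step('app-1', 'abc123', '[]', 'v2')  # same key: updated in place
    assert a.id == b.id and b.llm_response == 'v2'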
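
3) Resume flow. Project now starts with skip_steps = True, so as long as a
step has a cached row, AgentConvo replays it from the database (restoring
the file snapshot and reusing the stored messages and llm_response) without
calling the LLM. When the replayed step's id equals the skip_until_dev_step
argument, skip_steps flips to False; note the matching step itself is still
restored from cache, and only the steps after it trigger fresh LLM calls.
If skip_until_dev_step is absent, skip_steps never flips, giving a full
replay-from-cache run. A condensed sketch of that decision as a hypothetical
standalone helper (the patch inlines this logic in AgentConvo):

    def should_replay_from_cache(project, development_step):
        # Cache miss, or we are already past the resume point: ask the LLM.
        if development_step is None or not project.skip_steps:
            return False
        # Reached the step the user asked to resume from: replay it one
        # last time, then stop skipping from the next step onward.
        if project.skip_until_dev_step and str(development_step.id) == project.skip_until_dev_step:
            project.skip_steps = False
        return True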