diff --git a/euclid/database/database.py b/euclid/database/database.py
index 7e96af5..c58d5e5 100644
--- a/euclid/database/database.py
+++ b/euclid/database/database.py
@@ -106,20 +106,21 @@ def get_progress_steps(app_id, step=None):
     return steps


-def save_development_step(app_id, messages):
+def save_development_step(app_id, messages, response):
     app = get_app(app_id)
     hash_id = hash_data(messages)
     try:
-        dev_step = DevelopmentSteps.create(app=app, hash_id=hash_id, messages=messages)
+        dev_step = DevelopmentSteps.create(app=app, hash_id=hash_id, messages=messages, llm_response=response)
     except IntegrityError:
         print(f"A Development Step with hash_id {hash_id} already exists.")
         return None
     return dev_step


-def get_development_step_by_hash_id(hash_id):
+def get_development_step_from_messages(app_id, messages):
+    hash_id = hash_data(messages)
     try:
-        dev_step = DevelopmentSteps.get(DevelopmentSteps.hash_id == hash_id)
+        dev_step = DevelopmentSteps.get((DevelopmentSteps.hash_id == hash_id) & (DevelopmentSteps.app == app_id))
     except DoesNotExist:
         print(f"No Development Step found with hash_id {hash_id}")
         return None
diff --git a/euclid/helpers/AgentConvo.py b/euclid/helpers/AgentConvo.py
index d0502dd..6e8b170 100644
--- a/euclid/helpers/AgentConvo.py
+++ b/euclid/helpers/AgentConvo.py
@@ -1,4 +1,5 @@
 import subprocess
+from database.database import get_development_step_from_messages, save_development_step
 from utils.utils import array_of_objects_to_string
 from utils.llm_connection import get_prompt, create_gpt_chat_completion
 from utils.utils import get_sys_message, find_role_from_step, capitalize_first_word_with_underscores
@@ -21,7 +22,17 @@ class AgentConvo:
         prompt = get_prompt(prompt_path, prompt_data)
         self.messages.append({"role": "user", "content": prompt})

-        response = create_gpt_chat_completion(self.messages, self.high_level_step, function_calls=function_calls)
+
+        # check if we already have the LLM response saved
+        saved_checkpoint = get_development_step_from_messages(self.agent.project.args['app_id'], self.messages)
+        if saved_checkpoint is not None:
+            # if we do, use it
+            response = saved_checkpoint.llm_response
+            self.messages = saved_checkpoint.messages
+        else:
+            # if we don't, get the response from LLM
+            response = create_gpt_chat_completion(self.messages, self.high_level_step, function_calls=function_calls)
+            save_development_step(self.agent.project.args['app_id'], self.messages, response)

         # TODO handle errors from OpenAI
         if response == {}: