Mirror of https://github.com/OMGeeky/gpt-pilot.git, synced 2026-01-05 02:40:21 +01:00
Hash development steps by prompt path, prompt data and an LLM request number; add functionality for resuming from a specific development step
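The new cache key is built from the prompt identity and a per-run request counter rather than from the full message history. A minimal sketch of how such a key could be derived; hash_data here is a hypothetical stand-in for the repo's helper, assumed to serialize its argument deterministically before hashing:

import hashlib
import json

def hash_data(data):
    # Hypothetical stand-in for the repo's hash_data helper:
    # deterministic serialization, then a digest over the bytes.
    serialized = json.dumps(data, sort_keys=True, default=str).encode('utf-8')
    return hashlib.sha256(serialized).hexdigest()

hash_id = hash_data({
    'prompt_path': 'development/task/breakdown.prompt',  # illustrative values
    'prompt_data': {'task': 'implement login'},
    'llm_req_num': 3,
})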
@@ -107,19 +107,34 @@ def get_progress_steps(app_id, step=None):
     return steps
 
 
-def save_development_step(app_id, messages, response):
+def save_development_step(app_id, prompt_path, prompt_data, llm_req_num, messages, response):
     app = get_app(app_id)
-    hash_id = hash_data(messages)
+    hash_id = hash_data({
+        'prompt_path': prompt_path,
+        'prompt_data': prompt_data,
+        'llm_req_num': llm_req_num
+    })
     try:
-        dev_step = DevelopmentSteps.create(app=app, hash_id=hash_id, messages=messages, llm_response=response)
+        inserted_id = (DevelopmentSteps
+                       .insert(app=app, hash_id=hash_id, messages=messages, llm_response=response)
+                       .on_conflict(conflict_target=[DevelopmentSteps.app, DevelopmentSteps.hash_id],
+                                    preserve=[DevelopmentSteps.messages, DevelopmentSteps.llm_response],
+                                    update={})
+                       .execute())
+
+        dev_step = DevelopmentSteps.get_by_id(inserted_id)
     except IntegrityError:
         print(f"A Development Step with hash_id {hash_id} already exists.")
         return None
     return dev_step
 
 
-def get_development_step_from_messages(app_id, messages):
-    hash_id = hash_data(messages)
+def get_development_step_from_hash_id(app_id, prompt_path, prompt_data, llm_req_num):
+    hash_id = hash_data({
+        'prompt_path': prompt_path,
+        'prompt_data': prompt_data,
+        'llm_req_num': llm_req_num
+    })
     try:
         dev_step = DevelopmentSteps.get((DevelopmentSteps.hash_id == hash_id) & (DevelopmentSteps.app == app_id))
     except DoesNotExist:
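The bare DevelopmentSteps.create(), which raised IntegrityError on a duplicate hash, becomes a peewee upsert: on_conflict with preserve= keeps one row per (app, hash_id) but overwrites messages and llm_response with the newly inserted values, and execute() returns the row id either way. A minimal standalone sketch of the same pattern, using an invented KeyValue model rather than the repo's schema:

from peewee import Model, CharField, PostgresqlDatabase

db = PostgresqlDatabase('example')  # illustrative connection

class KeyValue(Model):
    key = CharField(unique=True)
    value = CharField()

    class Meta:
        database = db

row_id = (KeyValue
          .insert(key='a', value='new')
          .on_conflict(conflict_target=[KeyValue.key],
                       preserve=[KeyValue.value])  # on conflict, take the new value
          .execute())
row = KeyValue.get_by_id(row_id)  # the existing or freshly inserted row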
@@ -14,4 +14,7 @@ class DevelopmentSteps(BaseModel):
     llm_response = BinaryJSONField(null=False)
 
     class Meta:
-        db_table = 'development_steps'
+        db_table = 'development_steps'
+        indexes = (
+            (('app', 'hash_id'), True),
+        )
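In peewee, each entry in Meta.indexes is a (field names, unique) pair, so (('app', 'hash_id'), True) declares the unique composite index that the on_conflict target above relies on. A sketch of the full model this hunk implies; only llm_response, db_table and the index come from the diff, the other fields and types are assumptions:

from peewee import CharField, ForeignKeyField
from playhouse.postgres_ext import BinaryJSONField

class DevelopmentSteps(BaseModel):            # BaseModel and App assumed from context
    app = ForeignKeyField(App)
    hash_id = CharField(null=False)           # assumed type
    messages = BinaryJSONField(null=False)    # assumed type
    llm_response = BinaryJSONField(null=False)

    class Meta:
        db_table = 'development_steps'
        indexes = (
            # (field names, unique?): enforce one row per (app, hash_id)
            (('app', 'hash_id'), True),
        )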
@@ -1,5 +1,5 @@
 import subprocess
-from database.database import get_development_step_from_messages, save_development_step
+from database.database import get_development_step_from_hash_id, save_development_step
 from utils.utils import array_of_objects_to_string
 from utils.llm_connection import get_prompt, create_gpt_chat_completion
 from utils.utils import get_sys_message, find_role_from_step, capitalize_first_word_with_underscores
@@ -24,16 +24,20 @@ class AgentConvo:
 
 
         # check if we already have the LLM response saved
-        development_step = get_development_step_from_messages(self.agent.project.args['app_id'], self.messages)
-        if development_step is not None:
+        self.agent.project.llm_req_num += 1
+        development_step = get_development_step_from_hash_id(self.agent.project.args['app_id'], prompt_path, prompt_data, self.agent.project.llm_req_num)
+        if development_step is not None and self.agent.project.skip_steps:
             # if we do, use it
+            if self.agent.project.skip_until_dev_step and str(development_step.id) == self.agent.project.skip_until_dev_step:
+                self.agent.project.skip_steps = False
             print(colored(f'Restoring development step with id {development_step.id}', 'yellow'))
             self.agent.project.restore_files(development_step.id)
             response = development_step.llm_response
             self.messages = development_step.messages
         else:
             # if we don't, get the response from LLM
             response = create_gpt_chat_completion(self.messages, self.high_level_step, function_calls=function_calls)
-            development_step = save_development_step(self.agent.project.args['app_id'], self.messages, response)
+            development_step = save_development_step(self.agent.project.args['app_id'], prompt_path, prompt_data, self.agent.project.llm_req_num, self.messages, response)
+            self.agent.project.save_files_snapshot(development_step.id)
 
         # TODO handle errors from OpenAI
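Because llm_req_num is part of the hash, two requests with identical prompt_path and prompt_data made at different points in the run still get distinct hash_ids, which keeps replay aligned with request order. A toy illustration, reusing the hash_data sketch from above:

key_a = hash_data({'prompt_path': 'dev/next_step.prompt', 'prompt_data': {}, 'llm_req_num': 1})
key_b = hash_data({'prompt_path': 'dev/next_step.prompt', 'prompt_data': {}, 'llm_req_num': 2})
assert key_a != key_b  # same prompt, different position in the run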
@@ -16,6 +16,9 @@ from database.models.file_snapshot import FileSnapshot
 
 class Project:
     def __init__(self, args, name=None, description=None, user_stories=None, user_tasks=None, architecture=None, development_plan=None, current_step=None):
         self.args = args
+        self.llm_req_num = 0
+        self.skip_steps = True
+        self.skip_until_dev_step = args['skip_until_dev_step'] if 'skip_until_dev_step' in args else None
         # TODO make flexible
         self.root_path = ''
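With these defaults, every fresh Project starts in skip mode: cached steps are replayed until the id in skip_until_dev_step is reached, then skip_steps flips off and live LLM calls resume. A hypothetical invocation; only app_id and skip_until_dev_step come from the diff, the rest is illustrative scaffolding:

args = {
    'app_id': 'my-existing-app-id',   # illustrative id of the app to resume
    'skip_until_dev_step': '42',      # replay cached steps up to id 42, then go live
}
project = Project(args)
assert project.skip_steps is True     # replay mode until step 42 is restored
assert project.llm_req_num == 0       # request counter restarts with each run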