From e4b38a2accf4ba443e80060df0456fa358a4d0fe Mon Sep 17 00:00:00 2001 From: Kenneth Belitzky Date: Fri, 16 May 2025 03:15:00 +0000 Subject: [PATCH 1/4] Enhance file processing to include existing content in prompts for modifications --- struct_module/commands/generate.py | 10 +++++++++- struct_module/file_item.py | 11 ++++++++--- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/struct_module/commands/generate.py b/struct_module/commands/generate.py index a0f0c44..3c37d0c 100644 --- a/struct_module/commands/generate.py +++ b/struct_module/commands/generate.py @@ -90,8 +90,16 @@ def _create_structure(self, args): } ) + # Determine the full file path + file_path_to_create = os.path.join(args.base_path, name) + existing_content = None + if os.path.exists(file_path_to_create): + self.logger.info(f"File already exists: {file_path_to_create}") + with open(file_path_to_create, 'r') as existing_file: + existing_content = existing_file.read() + file_item.apply_template_variables(template_vars) - file_item.process_prompt(args.dry_run) + file_item.process_prompt(args.dry_run, existing_content=existing_content) file_item.create( args.base_path, diff --git a/struct_module/file_item.py b/struct_module/file_item.py index d6106c4..bccdbe9 100644 --- a/struct_module/file_item.py +++ b/struct_module/file_item.py @@ -55,7 +55,7 @@ def _configure_openai(self): def _get_file_directory(self): return os.path.dirname(self.name) - def process_prompt(self, dry_run=False): + def process_prompt(self, dry_run=False, existing_content=None): if self.user_prompt: self.logger.debug(f"Using user prompt: {self.user_prompt}") @@ -68,17 +68,22 @@ def process_prompt(self, dry_run=False): else: system_prompt = self.system_prompt + # If existing_content is provided, append it to the user prompt + user_prompt = self.user_prompt + if existing_content: + user_prompt += f"\n\nCurrent file content (if any):\n```\n{existing_content}\n```\n\nPlease modify existing content so that it meets the new requirements." 
+ if dry_run: self.logger.info("[DRY RUN] Would generate content using OpenAI API.") self.content = "[DRY RUN] Generating content using OpenAI" return - if not self.openai_client or not openai_api_key: + if self.openai_client and openai_api_key: completion = self.openai_client.chat.completions.create( model=self.openai_model, messages=[ {"role": "system", "content": system_prompt}, - {"role": "user", "content": self.user_prompt} + {"role": "user", "content": user_prompt} ] ) From 82b8f953bf449f75160c8ff2b3f53b6b66bce9a0 Mon Sep 17 00:00:00 2001 From: Kenneth Belitzky Date: Fri, 16 May 2025 04:05:23 +0000 Subject: [PATCH 2/4] Enhance file processing and update OpenAI model to gpt-4.1 --- example/gpt.yaml | 5 +++++ struct_module/commands/generate.py | 5 ++++- struct_module/file_item.py | 6 ++++-- 3 files changed, 13 insertions(+), 3 deletions(-) create mode 100644 example/gpt.yaml diff --git a/example/gpt.yaml b/example/gpt.yaml new file mode 100644 index 0000000..fcd3a4f --- /dev/null +++ b/example/gpt.yaml @@ -0,0 +1,5 @@ +files: + - .github/workflows/run_struct.yaml: + user_prompt: | + make sure that token is set on secrets and value is TOKEN + make sure that password is set on secrets and value is PASSWORD diff --git a/struct_module/commands/generate.py b/struct_module/commands/generate.py index 3c37d0c..aa8cd4f 100644 --- a/struct_module/commands/generate.py +++ b/struct_module/commands/generate.py @@ -98,8 +98,11 @@ def _create_structure(self, args): with open(file_path_to_create, 'r') as existing_file: existing_content = existing_file.read() + file_item.process_prompt( + args.dry_run, + existing_content=existing_content + ) file_item.apply_template_variables(template_vars) - file_item.process_prompt(args.dry_run, existing_content=existing_content) file_item.create( args.base_path, diff --git a/struct_module/file_item.py b/struct_module/file_item.py index bccdbe9..600c8de 100644 --- a/struct_module/file_item.py +++ b/struct_module/file_item.py @@ -47,7 +47,7 @@ def _configure_openai(self): self.openai_client = OpenAI(api_key=openai_api_key) if not openai_model: self.logger.debug("OpenAI model not found. Using default model.") - self.openai_model = "gpt-3.5-turbo" + self.openai_model = "gpt-4.1" else: self.logger.debug(f"Using OpenAI model: {openai_model}") self.openai_model = openai_model @@ -71,7 +71,7 @@ def process_prompt(self, dry_run=False, existing_content=None): # If existing_content is provided, append it to the user prompt user_prompt = self.user_prompt if existing_content: - user_prompt += f"\n\nCurrent file content (if any):\n```\n{existing_content}\n```\n\nPlease modify existing content so that it meets the new requirements." + user_prompt += f"\n\nCurrent file content (if any):\n```\n{existing_content}\n```\n\nPlease modify existing content so that it meets the new requirements. Your output should be plain text, without any code blocks or formatting. Do not include any explanations or comments. Just provide the final content of the file." 
if dry_run: self.logger.info("[DRY RUN] Would generate content using OpenAI API.") @@ -118,6 +118,8 @@ def apply_template_variables(self, template_vars): missing_vars = self.template_renderer.prompt_for_missing_vars(self.content, vars) vars.update(missing_vars) + print(self.content) + self.content = self.template_renderer.render_template(self.content, vars) def create(self, base_path, dry_run=False, backup_path=None, file_strategy='overwrite'): From 06c2f10f10943d51ab070804a43423a655ed4060 Mon Sep 17 00:00:00 2001 From: Kenneth Belitzky Date: Fri, 16 May 2025 04:08:42 +0000 Subject: [PATCH 3/4] Refactor prompt processing to log system and user prompts, and include existing content in user prompt --- struct_module/file_item.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/struct_module/file_item.py b/struct_module/file_item.py index 600c8de..98804ea 100644 --- a/struct_module/file_item.py +++ b/struct_module/file_item.py @@ -57,8 +57,6 @@ def _get_file_directory(self): def process_prompt(self, dry_run=False, existing_content=None): if self.user_prompt: - self.logger.debug(f"Using user prompt: {self.user_prompt}") - if not self.openai_client or not openai_api_key: self.logger.warning("Skipping processing prompt as OpenAI API key is not set.") return @@ -73,6 +71,9 @@ def process_prompt(self, dry_run=False, existing_content=None): if existing_content: user_prompt += f"\n\nCurrent file content (if any):\n```\n{existing_content}\n```\n\nPlease modify existing content so that it meets the new requirements. Your output should be plain text, without any code blocks or formatting. Do not include any explanations or comments. Just provide the final content of the file." + self.logger.debug(f"Using system prompt: {system_prompt}") + self.logger.debug(f"Using user prompt: {user_prompt}") + if dry_run: self.logger.info("[DRY RUN] Would generate content using OpenAI API.") self.content = "[DRY RUN] Generating content using OpenAI" From 17a5e12633b40201c0f1a2e45a8e67048a14484b Mon Sep 17 00:00:00 2001 From: Kenneth Belitzky Date: Fri, 16 May 2025 16:01:41 +0000 Subject: [PATCH 4/4] Remove debug print statement from content rendering in FileItem class --- struct_module/file_item.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/struct_module/file_item.py b/struct_module/file_item.py index 98804ea..a8ede99 100644 --- a/struct_module/file_item.py +++ b/struct_module/file_item.py @@ -119,8 +119,6 @@ def apply_template_variables(self, template_vars): missing_vars = self.template_renderer.prompt_for_missing_vars(self.content, vars) vars.update(missing_vars) - print(self.content) - self.content = self.template_renderer.render_template(self.content, vars) def create(self, base_path, dry_run=False, backup_path=None, file_strategy='overwrite'):
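
Reviewer's note (not part of the patches): taken together, the four patches change the generation flow so that an existing file is read first, its content is appended to the user prompt, the prompt step runs before template-variable rendering, and the default model becomes gpt-4.1. The sketch below is a standalone paraphrase of that end state, reconstructed from the hunks above; it is not code from the repository. The helper names (`build_user_prompt`, `generate_content`) and the final `completion.choices[0].message.content` access are illustrative assumptions, since the hunks do not show how the completion result is stored.

```python
# Reviewer's sketch only: a standalone paraphrase of the flow that
# FileItem.process_prompt has after patches 1-4. Helper names are
# illustrative; they do not exist in the repository.
import logging
from typing import Optional

from openai import OpenAI

logger = logging.getLogger(__name__)


def build_user_prompt(user_prompt: str, existing_content: Optional[str]) -> str:
    """Append the current file content so the model edits it instead of regenerating."""
    if existing_content:
        fence = "```"  # the patch wraps the existing content in a fenced block
        user_prompt += (
            f"\n\nCurrent file content (if any):\n{fence}\n{existing_content}\n{fence}\n\n"
            "Please modify existing content so that it meets the new requirements. "
            "Your output should be plain text, without any code blocks or formatting. "
            "Do not include any explanations or comments. "
            "Just provide the final content of the file."
        )
    return user_prompt


def generate_content(
    client: Optional[OpenAI],
    system_prompt: str,
    user_prompt: str,
    existing_content: Optional[str] = None,
    model: str = "gpt-4.1",  # new default introduced in patch 2
    dry_run: bool = False,
) -> Optional[str]:
    if client is None:
        # Mirrors the early return when the OpenAI API key is not configured.
        logger.warning("Skipping processing prompt as OpenAI API key is not set.")
        return None

    prompt = build_user_prompt(user_prompt, existing_content)
    logger.debug("Using system prompt: %s", system_prompt)  # logging added in patch 3
    logger.debug("Using user prompt: %s", prompt)

    if dry_run:
        logger.info("[DRY RUN] Would generate content using OpenAI API.")
        return "[DRY RUN] Generating content using OpenAI"

    completion = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt},
        ],
    )
    # Assumed accessor; the hunks do not show how self.content is assigned.
    return completion.choices[0].message.content
```

On the caller side, patch 2 also moves the `file_item.process_prompt(...)` call ahead of `file_item.apply_template_variables(template_vars)` in `_create_structure`, so content coming back from the model still passes through template rendering before `file_item.create(...)` writes it out.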