Skip to content

Commit

Permalink
pre-commit hooks
Browse files Browse the repository at this point in the history
  • Loading branch information
rsivilli committed Oct 4, 2024
1 parent f4bdc1b commit 5d429d7
Show file tree
Hide file tree
Showing 5 changed files with 164 additions and 67 deletions.
5 changes: 5 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
[flake8]
ignore = E203, E266, E501, W503, F403, F401
max-line-length = 130
max-complexity = 18
select = B,C,E,F,W,T4,B9
24 changes: 24 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
# - id: no-commit-to-branch


- repo: https://github.com/psf/black
rev: 22.10.0
hooks:
- id: black
language_version: python3.10

- repo: https://github.com/PyCQA/isort
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
hooks:
- id: flake8
76 changes: 58 additions & 18 deletions lazy_dev_ai/cli.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,31 @@
import click
import os
from lazy_dev_ai.llm import getClient, apply_code_template
from dotenv import load_dotenv
from pathlib import Path

import click
from dotenv import load_dotenv

from lazy_dev_ai.llm import apply_code_template, getClient
from lazy_dev_ai.prompts.defaults import load_default_prompt

# Load .env file and override with values from .env.local if present
load_dotenv('.env')
load_dotenv('.env.local', override=True)
load_dotenv(".env")
load_dotenv(".env.local", override=True)


@click.group()
@click.option('--api-key', envvar='OPENAI_API_KEY', help='API Key for authentication')
@click.option('--project', envvar='OPENAI_PROJECT', default=None, help='Project identifier (optional)')
@click.option('--organization', envvar='OPENAI_ORGANIZATION', default=None, help='Organization identifier (optional)')
@click.option("--api-key", envvar="OPENAI_API_KEY", help="API Key for authentication")
@click.option(
"--project",
envvar="OPENAI_PROJECT",
default=None,
help="Project identifier (optional)",
)
@click.option(
"--organization",
envvar="OPENAI_ORGANIZATION",
default=None,
help="Organization identifier (optional)",
)
def cli(api_key, project=None, organization=None):
"""Initialize the CLI environment.
Expand All @@ -22,39 +35,66 @@ def cli(api_key, project=None, organization=None):
organization (str, optional): The organization identifier.
"""
if not api_key:
click.echo("API key not provided. Use --api-key option or set API_KEY in environment variables or .env file.")
click.echo(
"API key not provided. Use --api-key option or set API_KEY in environment variables or .env file."
)
getClient(api_key=api_key, project=project, organization=organization)


@cli.command()
@click.argument('paths', nargs=-1, type=click.Path(exists=True, path_type=Path))
@click.argument("paths", nargs=-1, type=click.Path(exists=True, path_type=Path))
def improve_comments(paths):
"""Automatically improve or refactor comments in code files.
Args:
paths (Tuple[Path]): The paths to the code files needing comment improvements.
"""
client = getClient()
prompt = load_default_prompt('comment')

prompt = load_default_prompt("comment")
for f in paths:
if f.is_file():
click.echo(f"Refactoring comments in {f.as_posix()}")
apply_code_template(code_file=f, prompt=prompt)


@cli.command()
@click.argument('paths', nargs=-1, type=click.Path(exists=True, path_type=Path))
def generate_docstrings(paths):
@click.argument("paths", nargs=-1, type=click.Path(exists=True, path_type=Path))
def generate_docstrings(paths: list[Path]):
"""Automatically generate docstrings for functions in code files.
Args:
paths (Tuple[Path]): The paths to the code files needing docstrings.
"""
client = getClient()
prompt = load_default_prompt('docstring')
prompt = load_default_prompt("docstring")

for f in paths:
if f.is_file():
click.echo(f"Generating docstring for {f.as_posix()}")
apply_code_template(code_file=f, prompt=prompt)
if __name__ == '__main__':


@cli.command()
@click.option("--prompt", type=str, default=None)
@click.option(
    "--prompt-file", default=None, type=click.Path(exists=True, path_type=Path)
)
@click.argument("paths", nargs=-1, type=click.Path(exists=True, path_type=Path))
def custom_prompt(prompt, prompt_file, paths: list[Path]):
    """Apply a user-supplied prompt to one or more code files.

    Args:
        prompt (str, optional): The custom prompt given directly as a string.
        prompt_file (Path, optional): Path to a file containing the custom prompt.
        paths (Tuple[Path]): The code files the prompt will be applied to.
    """
    # One of the two prompt sources is required; reject the call otherwise.
    if prompt is None and prompt_file is None:
        raise ValueError("Must provide a prompt string or prompt-file")
    for path in paths:
        # Guard clause: report and skip anything that is not a regular file.
        if not path.is_file():
            click.echo(f"{path.as_posix()} is not a file")
            continue
        click.echo(f"Applying custom prompt to {path.as_posix()}")
        apply_code_template(code_file=path, prompt=prompt, prompt_file=prompt_file)


if __name__ == "__main__":
cli()
47 changes: 35 additions & 12 deletions lazy_dev_ai/files.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,44 @@

import importlib
import importlib.resources
from pathlib import Path
from string import Template

# Load default base template for use from an internal resource file
# Load the default base template from an internal resource file
with importlib.resources.open_text("lazy_dev_ai.templates", "default_base.txt") as file:
default_template = Template(file.read())

def load_template(file: str | Path|None =None) -> Template:
# Use the default template if no specific file is given
if file is None:

def load_template(file: str | Path | None = None) -> Template:
"""
Load a template file into a Template object, or use the default template if no file is specified.
:param file: The path to the template file or None to use the default template.
:type file: str | Path | None
:return: Template object loaded with file content or default content.
:rtype: Template
:raises: FileNotFoundError if the file specified does not exist.
"""
if file is None:
return default_template
# Ensures the file path is valid and can be read
file_path = Path(file)
if not file_path.exists():
raise FileNotFoundError(f"The file {file_path} does not exist.")

with file_path.open("r", encoding="utf-8") as f:
template_content = f.read()

return Template(template_content)


def load_file(file:str | Path) -> str:
def load_file(file: str | Path) -> str:
"""
Read the content of a given file. If the file does not exist, an exception is raised.
:param file: The path to the file to read.
:type file: str | Path
:return: A string containing the content of the file.
:rtype: str
:raises: FileNotFoundError if the file specified does not exist.
"""
file_path = Path(file)
if not file_path.exists():
raise FileNotFoundError(f"The file {file_path} does not exist.")
Expand All @@ -31,8 +47,15 @@ def load_file(file:str | Path) -> str:
return content


def write_file(file:str|Path, contents:str):
def write_file(file: str | Path, contents: str):
"""
Write contents to a specified file. If the file path does not exist, it will be created.
:param file: The path where content will be written.
:type file: str | Path
:param contents: The content to write to the file.
:type contents: str
"""
file_path = Path(file)
with file_path.open("w", encoding="utf-8") as f:
f.write(contents)

79 changes: 42 additions & 37 deletions lazy_dev_ai/llm.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
from lazy_dev_ai.files import load_file,load_template,write_file
from openai import OpenAI
from pydantic import BaseModel, Field, ConfigDict, ValidationError
from pathlib import Path
from enum import Enum
from pathlib import Path

from openai import OpenAI
from pydantic import BaseModel, ConfigDict, Field, ValidationError

default_template=None
from lazy_dev_ai.files import load_file, load_template, write_file

default_template = None
client = None


Expand All @@ -13,68 +15,71 @@ class OpenAIRole(str, Enum):
USER = "user"
ASSISTANT = "assistant"

class SEVERITY(str,Enum):

class SEVERITY(str, Enum):
LOW = "LOW"
MEDIUM="MEDIUM"
MEDIUM = "MEDIUM"
HIGH = "HIGH"
CRITICAL = "CRITICAL"


class ChatGPTMessage(BaseModel):
    """A single chat message sent to the OpenAI chat-completions API."""
    role: OpenAIRole  # Author of the message; one of the OpenAIRole enum values.
    content: str  # Message text passed to the model.


class CodeChangeResponse(BaseModel):
model_config = ConfigDict(extra="allow")
change_required:bool
content:str|None = Field(None)
change_explanation:str|None = Field(None)
severity:SEVERITY|None = Field(None)
change_required: bool
content: str | None = Field(None)
change_explanation: str | None = Field(None)
severity: SEVERITY | None = Field(None)


def getClient(api_key:str=None, organization:str=None, project:str=None)->OpenAI:
# Create a client instance only if it hasn't been initialized before
def getClient(
api_key: str = None, organization: str = None, project: str = None
) -> OpenAI:
"""Create and return a new client instance if not already created, using provided credentials."""
global client
if client is None:
client = OpenAI(
api_key=api_key,
organization=organization,
project = project

)
client = OpenAI(api_key=api_key, organization=organization, project=project)
return client


def apply_code_template(code_file:str|Path,prompt_file:str|Path=None,prompt:str=None,model: str = "gpt-4-turbo",template_file:str|Path=None,max_retries:int = 3)->CodeChangeResponse:
# Ensure a valid prompt is provided before proceeding
def apply_code_template(
code_file: str | Path,
prompt_file: str | Path = None,
prompt: str = None,
model: str = "gpt-4-turbo",
template_file: str | Path = None,
max_retries: int = 3,
) -> CodeChangeResponse:
"""Apply a template to the content of a code file using a specified or loaded prompt and handle potential changes."""
if prompt is None and prompt_file is None:
raise ValueError("Must provide either a prompt or a prompt file")
prompt = prompt or load_file(prompt_file)
# Load the template for use
template = load_template(template_file)
# Load the code content to be modified
code = load_file(code_file)
# Prepare system message with file contents and provided prompt
messages = [
ChatGPTMessage(role=OpenAIRole.SYSTEM,content=template.substitute({"file_contents":code,"prompt":prompt})),

ChatGPTMessage(
role=OpenAIRole.SYSTEM,
content=template.substitute({"file_contents": code, "prompt": prompt}),
),
]
# Initialize change checking variables
code_changes = None
attempt_count = 0
# Attempt to generate code changes up to max allowed retries
while code_changes is None and attempt_count < max_retries:
try:
# Attempt to get code changes with available AI model and included file contents
response = getClient().chat.completions.create(model=model, messages=messages)
# Validate the suggested changes from response
code_changes = CodeChangeResponse.model_validate_json(response.choices[0].message.content)
response = getClient().chat.completions.create(
model=model, messages=messages
)
code_changes = CodeChangeResponse.model_validate_json(
response.choices[0].message.content
)
if code_changes.change_required and code_changes.content is not None:
# If changes are required, write them back to the file
write_file(file=code_file,contents=code_changes.content)
write_file(file=code_file, contents=code_changes.content)
return
except ValidationError as e:
# Handle any validation errors
print(str(e))

# If retries exceeded without success, assert error
raise AssertionError("Exceeded max retries with ai")
raise AssertionError("Exceeded max retries with AI integration")

0 comments on commit 5d429d7

Please sign in to comment.