Commit 94dfe04
Merge branch 'main' into 562-updata-work-summary-to-output-in-a-templated-format

sauravpanda authored Sep 18, 2024
2 parents 1781312 + 27f4c90

Showing 10 changed files with 55 additions and 22 deletions.
6 changes: 0 additions & 6 deletions .pre-commit-config.yaml
@@ -4,9 +4,3 @@ repos:
     hooks:
       - id: flake8
         args: [--config=.flake8]
-
-  - repo: https://github.com/psf/black
-    rev: 23.3.0
-    hooks:
-      - id: black
-        args: [--line-length=88]
3 changes: 2 additions & 1 deletion cli/kaizen_cli/cli.py
@@ -2,7 +2,7 @@
 from .config.manager import load_config
 from .commands.config_commands import config
 from .commands.unit_test_commands import unit_test
-from .commands.reviewer_commands import reviewer
+from .commands.reviewer_commands import reviewer, generate_commit_msg
 from .hooks.setup import hooks
 from kaizen.generator.e2e_tests import E2ETestGenerator

@@ -25,6 +25,7 @@ def ui_tests(url):
 cli.add_command(unit_test)
 cli.add_command(reviewer)
 cli.add_command(hooks)
+cli.add_command(generate_commit_msg)

 if __name__ == "__main__":
     cli()
20 changes: 20 additions & 0 deletions cli/kaizen_cli/commands/reviewer_commands.py
@@ -1,4 +1,7 @@
 import click
+from kaizen.generator.pr_description import PRDescriptionGenerator
+from kaizen.llms.provider import LLMProvider
+from ..config.manager import load_config


 @click.group()
@@ -14,3 +17,20 @@ def work(github_url, branch):
     """Run reviewer work"""
     click.echo(f"Reviewing {github_url} on branch {branch}")
     # Implement the reviewer work logic here
+
+
+@click.command()
+@click.argument("diff", type=str, required=True)
+def generate_commit_msg(diff):
+    """Generate a commit message based on the provided diff"""
+    model_config = load_config()["language_model"]["models"][0]["litellm_params"]
+    generator = PRDescriptionGenerator(LLMProvider(model_config=model_config))
+    desc = generator.generate_pull_request_desc(
+        diff_text=diff,
+        pull_request_title="",
+        pull_request_desc="",
+        pull_request_files=[],
+        user="",
+    )
+    msg, _, _ = generator.generate_pr_commit_message(desc)
+    click.echo(f'{msg["subject"]}\n\n{msg["body"]}')
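For orientation, a minimal usage sketch of the new command follows. It assumes the CLI package is importable as kaizen_cli and that load_config() already points at a working language model; both are assumptions, not shown in this diff. The diff argument mirrors what the prepare-commit-msg hook further below passes in.

# Minimal sketch: invoke generate_commit_msg in-process with the staged diff.
# Assumes kaizen_cli is installed and a model config is available.
import subprocess

from click.testing import CliRunner
from kaizen_cli.commands.reviewer_commands import generate_commit_msg

# Same input the prepare-commit-msg hook uses: the currently staged changes.
staged_diff = subprocess.run(
    ["git", "diff", "--cached"], capture_output=True, text=True, check=True
).stdout

runner = CliRunner()
result = runner.invoke(generate_commit_msg, [staged_diff])
print(result.output)  # expected shape: "<subject>\n\n<body>"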
2 changes: 1 addition & 1 deletion cli/kaizen_cli/config/manager.py
@@ -24,7 +24,7 @@ def load_config():

     # Override with environment variables
     for key, value in os.environ.items():
-        if key.startswith("MYAPP_"):
+        if key.startswith("KAIZEN_"):
             config_key = key[6:].lower().split("__")
             try:
                 parsed_value = json.loads(value)
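For readers unfamiliar with the override convention this loop relies on, the sketch below shows how a KAIZEN_-prefixed environment variable with "__" separators resolves into a nested config dict. It is an illustration only: the key names are made up, and the prefix is stripped with str.removeprefix here for clarity rather than the fixed slice used above.

# Illustrative only: KAIZEN_-prefixed variables override nested config keys.
import json
import os

config = {"language_model": {"enable_logging": False}}
os.environ["KAIZEN_LANGUAGE_MODEL__ENABLE_LOGGING"] = "true"

for key, value in os.environ.items():
    if key.startswith("KAIZEN_"):
        config_key = key.removeprefix("KAIZEN_").lower().split("__")
        try:
            parsed_value = json.loads(value)  # "true" -> True, "4000" -> 4000
        except json.JSONDecodeError:
            parsed_value = value  # keep the raw string if it is not valid JSON
        node = config
        for part in config_key[:-1]:
            node = node.setdefault(part, {})
        node[config_key[-1]] = parsed_value

print(config["language_model"]["enable_logging"])  # True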
12 changes: 9 additions & 3 deletions cli/kaizen_cli/hooks/prepare-commit-msg
@@ -1,8 +1,14 @@
 #!/bin/sh
 # hooks/prepare-commit-msg

+# Change to the repository root directory
+cd "$(git rev-parse --show-toplevel)" || exit 1
+
+# Get the staged changes
+staged_diff=$(git diff --cached)
+
 # Run your CLI command and capture the output
-commit_msg=$(kaizen-cli generate-commit-msg)
+commit_info=$(kaizen-cli generate-commit-msg "$staged_diff")

-# Overwrite the commit message file with the generated message
-echo "$commit_msg" > "$1"
+# Write the commit info to the commit message file
+echo "$commit_info" > "$1"
5 changes: 4 additions & 1 deletion cli/kaizen_cli/hooks/setup.py
@@ -15,14 +15,17 @@ def hooks():
 @click.argument("hook_type", type=click.Choice(HOOK_TYPES))
 def install(hook_type):
     """Install a specific git hook"""
-    source = os.path.join(os.path.dirname(__file__), "hooks", hook_type)
+    source = os.path.join(os.path.dirname(__file__), hook_type)
+    print(source)
     destination = os.path.join(".git", "hooks", hook_type)

     if not os.path.exists(source):
         click.echo(f"Error: Hook script for {hook_type} not found.")
         return

     try:
+        # Create the destination directory if it doesn't exist
+        os.makedirs(os.path.dirname(destination), exist_ok=True)
         shutil.copy(source, destination)
         os.chmod(destination, 0o755)
         click.echo(f"{hook_type} hook installed successfully")
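A short usage sketch for this install command follows. It assumes the package is importable as kaizen_cli, that "prepare-commit-msg" is among the registered HOOK_TYPES, and that the script is run from the repository root so the .git/hooks destination resolves.

# Minimal sketch: install the prepare-commit-msg hook via the click group above.
from click.testing import CliRunner
from kaizen_cli.hooks.setup import hooks

runner = CliRunner()
result = runner.invoke(hooks, ["install", "prepare-commit-msg"])
print(result.output)  # "prepare-commit-msg hook installed successfully" on success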
7 changes: 4 additions & 3 deletions examples/code_review/main.py
@@ -12,8 +12,8 @@

 logging.basicConfig(level="DEBUG")

-pr_diff = "https://github.com/Cloud-Code-AI/kaizen/pull/335.patch"
-pr_files = "https://api.github.com/repos/Cloud-Code-AI/kaizen/pulls/335/files"
+pr_diff = "https://github.com/Cloud-Code-AI/kaizen/pull/559.patch"
+pr_files = "https://api.github.com/repos/Cloud-Code-AI/kaizen/pulls/559/files"
 pr_title = "feat: updated the prompt to provide solution"

 diff_text = get_diff_text(pr_diff, "")
@@ -31,7 +31,7 @@
     reeval_response=False,
 )

-topics = clean_keys(review_data.topics, "important")
+topics = clean_keys(review_data.topics, "high")
 review_desc = create_pr_review_text(
     review_data.issues, code_quality=review_data.code_quality
 )
@@ -54,3 +54,4 @@
 print(desc_data)

 comit_message = pr_desc.generate_pr_commit_message(desc_data.desc)
+print(comit_message)
4 changes: 2 additions & 2 deletions github_app/github_helper/pull_requests.py
@@ -19,8 +19,8 @@

 confidence_mapping = {
     "critical": 5,
-    "important": 4,
-    "moderate": 3,
+    "high": 4,
+    "medium": 3,
     "low": 2,
     "trivial": 1,
 }
2 changes: 1 addition & 1 deletion kaizen/generator/pr_description.py
@@ -181,4 +181,4 @@ def generate_pr_commit_message(
             DESC=desc,
         )
         resp, usage = self.provider.chat_completion_with_json(prompt, user=user)
-        return resp, usage
+        return resp, usage, self.provider.model
16 changes: 12 additions & 4 deletions kaizen/llms/provider.py
@@ -9,7 +9,7 @@
 import logging
 from collections import defaultdict

-DEFAULT_MAX_TOKENS = 8000
+DEFAULT_MAX_TOKENS = 4000


 def set_all_loggers_to_ERROR():
@@ -36,7 +36,7 @@ def set_all_loggers_to_ERROR():


 class LLMProvider:
-    DEFAULT_MODEL = "gpt-3.5-turbo-1106"
+    DEFAULT_MODEL = "gpt-4o-mini"
     DEFAULT_MAX_TOKENS = 4000
     DEFAULT_TEMPERATURE = 0
     DEFAULT_MODEL_CONFIG = {"model": DEFAULT_MODEL}
@@ -233,7 +233,12 @@ def is_inside_token_limit(self, PROMPT: str, percentage: float = 0.8) -> bool:
             {"role": "user", "content": PROMPT},
         ]
         token_count = litellm.token_counter(model=self.model, messages=messages)
-        max_tokens = litellm.get_max_tokens(self.model)
+        if token_count is None:
+            token_count = litellm.token_counter(model=self.DEFAULT_MODEL, text=PROMPT)
+        try:
+            max_tokens = litellm.get_max_tokens(self.model)
+        except Exception:
+            max_tokens = DEFAULT_MAX_TOKENS
         if not max_tokens:
             max_tokens = DEFAULT_MAX_TOKENS
         return token_count <= max_tokens * percentage
@@ -243,7 +248,10 @@ def available_tokens(
     ) -> int:
         if not model:
             model = self.model
-        max_tokens = litellm.get_max_tokens(model)
+        try:
+            max_tokens = litellm.get_max_tokens(model)
+        except Exception:
+            max_tokens = DEFAULT_MAX_TOKENS
         used_tokens = litellm.token_counter(model=model, text=message)
         if max_tokens:
             return int(max_tokens * percentage) - used_tokens
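The two provider hunks above share one defensive pattern: litellm.get_max_tokens can raise for models it does not recognize, so the lookup now falls back to DEFAULT_MAX_TOKENS instead of propagating the error. A standalone sketch of that pattern is below; the helper name max_tokens_or_default is hypothetical and used only for illustration.

# Standalone illustration of the fallback introduced above.
import litellm

DEFAULT_MAX_TOKENS = 4000

def max_tokens_or_default(model: str) -> int:
    try:
        max_tokens = litellm.get_max_tokens(model)
    except Exception:
        # Unknown or custom models: fall back to a conservative fixed budget.
        max_tokens = DEFAULT_MAX_TOKENS
    return max_tokens or DEFAULT_MAX_TOKENS

print(max_tokens_or_default("gpt-4o-mini"))      # model-specific context limit
print(max_tokens_or_default("my-custom-model"))  # falls back to 4000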
