First working version of reviewLlama with some different defaults

Alex Selimov 2025-07-15 21:45:56 -04:00
parent d917a9c067
commit f8044a043b
Signed by: aselimov
GPG key ID: 3DDB9C3E023F1F31
2 changed files with 10 additions and 12 deletions

View file

@@ -25,7 +25,7 @@ def create_argument_parser() -> argparse.ArgumentParser:
         epilog="""
 Examples:
   reviewllama . --model gemma3:27b --server localhost:11434
-  reviewllama src/ tests/ --model llama3.2:7b
+  reviewllama src/ tests/ --model gemma3:4b
         """,
     )
@ -38,7 +38,7 @@ Examples:
parser.add_argument( parser.add_argument(
"--model", "--model",
default="llama3.2:3b", default="gemma3:4b",
help="Ollama model to use for code review (default: %(default)s)", help="Ollama model to use for code review (default: %(default)s)",
) )
@@ -69,7 +69,10 @@ Examples:
         default=(
             "You are a PR review assistant in charge of softare quality control. "
             "You analyze code changes in the context of the full code base to verify style, "
-            "syntax, and functionality"
+            "syntax, and functionality. Each suggestion should consist of the original code, "
+            "suggested changes if relevant, and a short description of why the change is suggested "
+            "Examples\nInput:\n```\nvarialbe=1+1\n```\nOutput:\nOriginal:\n```\nvarialbe=1+1\n"
+            "Suggestion:\n```variable=1+1\n```\nReason: `varialbe` is likely a typo."
         ),
         help="Base branch to compare against (default: %(default)s)",
     )
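
Aside: a quick, hedged way to confirm the new CLI defaults is to build the parser and inspect the parsed namespace. Only create_argument_parser() and the --model flag are confirmed by this diff; the import path below is an assumption.

    # Hypothetical smoke test for the new defaults.
    # The module path "reviewllama.cli" is an assumption, not shown in this commit.
    from reviewllama.cli import create_argument_parser

    parser = create_argument_parser()
    args = parser.parse_args(["."])   # review the current directory
    assert args.model == "gemma3:4b"  # new default model after this commit
    print(args.model)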

View file

@@ -16,9 +16,7 @@ def run_reviewllama(config: ReviewConfig):
     for path in config.paths:
         chat_client = create_and_log_chat_client(config.ollama)
         analysis = create_and_log_git_diff_analysis(path, config.base_branch)
-        retriever = create_and_log_vector_store_retriever(
-            analysis.repo, config.ollama
-        )
+        retriever = create_and_log_vector_store_retriever(analysis.repo, config.ollama)
 
         for diff in analysis.diffs:
             chat_client = get_suggestions(diff, retriever, chat_client)
@@ -37,13 +35,10 @@ def create_and_log_git_diff_analysis(path: Path, base_branch: str) -> GitAnalysi
 def create_and_log_vector_store_retriever(
     repo: Repo, config: OllamaConfig
 ) -> VectorStoreRetriever:
     log_info("Creating vector_store...")
-    retriever = create_retriever(
-        get_tracked_files(repo),
-        config
-    )
+    retriever = create_retriever(get_tracked_files(repo), config)
     log_info("Done creating vector store")
     return retriever
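
Aside: the project's create_retriever is not part of this commit, but the types here point at a LangChain VectorStoreRetriever built over the repository's tracked files. A minimal sketch of one way to build such a retriever, assuming Ollama embeddings and an in-memory store; the embedding model name, base URL, and the langchain-core / langchain-ollama packages are assumptions, not the project's actual implementation.

    # Hedged sketch of a create_retriever-style helper; not the project's code.
    from pathlib import Path

    from langchain_core.vectorstores import InMemoryVectorStore, VectorStoreRetriever
    from langchain_ollama import OllamaEmbeddings  # assumed package


    def build_retriever(tracked_files: list[Path]) -> VectorStoreRetriever:
        # Embed each tracked file and index it in an in-memory vector store.
        embeddings = OllamaEmbeddings(model="nomic-embed-text", base_url="http://localhost:11434")
        texts = [path.read_text(errors="ignore") for path in tracked_files]
        store = InMemoryVectorStore.from_texts(texts, embedding=embeddings)
        return store.as_retriever(search_kwargs={"k": 4})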
@@ -52,7 +47,7 @@ def get_suggestions(
     diff: GitDiff, retriever: VectorStoreRetriever, chat_client: ChatClient
 ) -> ChatClient:
     new_client = chat_with_client(chat_client, craft_message(diff), retriever)
-    log_info(str(new_client.get_last_response_or_none()))
+    log_info(str(new_client.get_last_response_or_none().content))
     return new_client
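
Aside: the last change logs only the text of the model's reply instead of the whole response object. Assuming the chat client holds LangChain message objects (suggested by the surrounding types, but not confirmed by this diff), the difference looks like this:

    # Hedged illustration; the actual return type of get_last_response_or_none()
    # is not shown in this commit.
    from langchain_core.messages import AIMessage

    response = AIMessage(content="Consider renaming `varialbe` to `variable`.")
    print(str(response))     # full message object, including metadata fields
    print(response.content)  # just the review text, which is what gets logged now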