Reformatting, fixing tests, adding basic RAG pipeline implementation
parent a6cdbf1761
commit 24bfef99a2
12 changed files with 721 additions and 131 deletions
tests/test_llm.py (Normal file, +35 lines)
@@ -0,0 +1,35 @@
"""
Unit tests for llm chat client functionality
"""

import pytest

from reviewllama.configs import create_ollama_config
from reviewllama.llm import chat_with_client, create_chat_client
from reviewllama.utilities import is_ollama_available


@pytest.fixture
def ollama_config():
    return create_ollama_config(
        "gemma3:4b", "localhost:11434", "You are a helpful assistant.", 0.0
    )


@pytest.fixture
def chat_client(ollama_config):
    return create_chat_client(ollama_config)


def test_chat_client(ollama_config, chat_client):
    if not is_ollama_available(ollama_config):
        pytest.skip("Local Ollama server is not available")

    chat_client = chat_with_client(
        chat_client, "Tell me your name and introduce yourself briefly"
    )
    response = chat_client.get_last_response_or_none()

    assert response is not None
    assert len(response.content) > 0
    assert "gemma" in response.content.lower()