# ReviewLlama/tests/test_llm.py

"""
Unit tests for llm chat client functionality
"""
import pytest
from reviewllama.configs import create_ollama_config
from reviewllama.llm import chat_with_client, create_chat_client
from reviewllama.utilities import is_ollama_available
@pytest.fixture
def ollama_config():
    """Fixture: config for a local gemma3:4b Ollama endpoint with temperature 0."""
    config = create_ollama_config(
        "gemma3:4b",
        "localhost:11434",
        "You are a helpful assistant.",
        0.0,
    )
    return config
@pytest.fixture
def chat_client(ollama_config):
    """Fixture: fresh chat client built from the ``ollama_config`` fixture."""
    client = create_chat_client(ollama_config)
    return client
def test_chat_client(ollama_config, chat_client):
    """Round-trip a prompt through the chat client against a live Ollama server.

    Integration test: skipped unless a local Ollama instance is reachable.
    """
    if not is_ollama_available(ollama_config):
        pytest.skip("Local Ollama server is not available")

    # chat_with_client returns a new client carrying the response history.
    updated_client = chat_with_client(
        chat_client, "Tell me your name and introduce yourself briefly"
    )
    response = updated_client.get_last_response_or_none()

    assert response is not None
    assert len(response.content) > 0
    # NOTE(review): relies on the model mentioning its own name in the reply —
    # potentially flaky across model revisions; confirm this is intended.
    assert "gemma" in response.content.lower()