-- notex.nvim/tests/performance/test_query_performance.lua
-- (file-listing metadata: 180 lines, 6 KiB, Lua, no EOL at end of file)
-- Performance tests for query execution
local query_engine = require('notex.query')
describe("query performance", function()
-- Provision the fixture documents this suite queries against.
-- Currently a placeholder: the shared test harness is expected to have
-- created the data already, so this simply reports readiness.
local function setup_test_data()
  return true
end
-- Re-provision fixtures before every spec in this suite; the hook function
-- is passed directly since the wrapper closure added nothing.
before_each(setup_test_data)
describe("query execution time", function()
  -- Execute a single query and return its result plus the wall-clock
  -- duration in milliseconds (vim.loop.hrtime() reports nanoseconds).
  local function timed_run(query)
    local started = vim.loop.hrtime()
    local result = query_engine.execute_query(query)
    return result, (vim.loop.hrtime() - started) / 1000000
  end

  it("should execute simple queries under 100ms", function()
    local result, elapsed_ms = timed_run('FROM documents LIMIT 10')
    assert.is_true(result.success, "Query should execute successfully")
    assert.is_true(elapsed_ms < 100,
      string.format("Query took %.2fms, expected < 100ms", elapsed_ms))
  end)

  it("should execute complex queries under 5 seconds", function()
    -- Query text is kept at column 0 so the long-bracket string content
    -- stays byte-identical.
    local result, elapsed_ms = timed_run([[
FROM documents
WHERE status = "published"
ORDER BY created_at DESC
LIMIT 100
]])
    assert.is_true(result.success, "Query should execute successfully")
    assert.is_true(elapsed_ms < 5000,
      string.format("Complex query took %.2fms, expected < 5000ms", elapsed_ms))
  end)
end)
describe("caching performance", function()
  it("should improve performance with cached queries", function()
    local cache = require('notex.utils.cache')
    cache.init({lru = {enabled = true, max_size = 100}})
    -- Guarantee cache teardown even when an assertion below errors out;
    -- previously a failing spec skipped cleanup() and leaked cache state
    -- into subsequent tests.
    finally(function() cache.cleanup() end)

    local query = 'FROM documents WHERE tags LIKE "test" ORDER BY updated_at DESC LIMIT 20'

    -- First execution (cold cache)
    local start_time = vim.loop.hrtime()
    local result1 = query_engine.execute_query(query)
    local cold_time = (vim.loop.hrtime() - start_time) / 1000000

    -- Second execution (warm cache)
    start_time = vim.loop.hrtime()
    local result2 = query_engine.execute_query(query)
    local warm_time = (vim.loop.hrtime() - start_time) / 1000000

    assert.is_true(result1.success, "First query should succeed")
    assert.is_true(result2.success, "Second query should succeed")

    -- Warm cache should be faster (or at least not significantly slower).
    -- Skip the ratio check when the cold run measured 0ms: dividing by
    -- zero yields inf/nan and would fail the assertion spuriously even
    -- though the cache behaved perfectly.
    if cold_time > 0 then
      local improvement_ratio = warm_time / cold_time
      assert.is_true(improvement_ratio <= 1.5,
        string.format("Cache didn't help: cold=%.2fms, warm=%.2fms, ratio=%.2f",
          cold_time, warm_time, improvement_ratio))
    end
  end)
end)
describe("concurrent query performance", function()
  it("should handle multiple concurrent queries", function()
    local queries = {
      'FROM documents LIMIT 10',
      'FROM documents WHERE status = "draft" LIMIT 10',
      'FROM documents WHERE created_at > "2023-01-01" LIMIT 10',
      'FROM documents ORDER BY updated_at DESC LIMIT 10'
    }

    local start_time = vim.loop.hrtime()
    local results = {}
    local error_count = 0

    -- Execute queries concurrently (simulated with immediate execution).
    -- Successes are appended densely and failures tallied with a counter:
    -- the previous version stored both sparsely by index, and the '#'
    -- operator on a table with nil holes is unspecified in Lua, so a
    -- mid-list failure could make "#errors == 0" pass falsely.
    for _, query in ipairs(queries) do
      local ok, result = pcall(query_engine.execute_query, query)
      if ok then
        results[#results + 1] = result
      else
        error_count = error_count + 1
      end
    end

    local total_time = (vim.loop.hrtime() - start_time) / 1000000

    assert.equals(0, error_count, "No query errors should occur")
    assert.equals(#queries, #results, "All queries should execute")
    for _, result in ipairs(results) do
      assert.is_true(result.success, "Each query should succeed")
    end

    -- Should complete in reasonable time
    assert.is_true(total_time < 1000,
      string.format("Concurrent queries took %.2fms, expected < 1000ms", total_time))
  end)
end)
describe("large result set performance", function()
  it("should handle large result sets efficiently", function()
    -- Time a query whose LIMIT allows up to 1000 rows back.
    local t0 = vim.loop.hrtime()
    local result = query_engine.execute_query('FROM documents LIMIT 1000')
    local elapsed_ms = (vim.loop.hrtime() - t0) / 1000000

    assert.is_true(result.success, "Query should execute successfully")

    local docs = result.documents
    if docs then
      assert.is_true(#docs <= 1000, "Should not exceed limit")
    end

    -- Even large result sets should be reasonably fast
    assert.is_true(elapsed_ms < 2000,
      string.format("Large result set took %.2fms, expected < 2000ms", elapsed_ms))
  end)
end)
describe("memory usage", function()
  it("should not leak memory during repeated queries", function()
    -- Collect BEFORE taking the baseline so garbage left over from
    -- earlier tests is not counted in this test's footprint; the previous
    -- version measured a dirty heap, which could mask or exaggerate a
    -- leak. Two passes also reclaim objects whose finalizers keep them
    -- alive through a single cycle.
    collectgarbage("collect")
    collectgarbage("collect")
    local initial_memory = collectgarbage("count")

    local query = 'FROM documents LIMIT 10'

    -- Execute many queries
    for _ = 1, 100 do
      local result = query_engine.execute_query(query)
      assert.is_true(result.success, "Query should succeed")
    end

    -- Force garbage collection
    collectgarbage("collect")
    collectgarbage("collect")
    local final_memory = collectgarbage("count")
    -- collectgarbage("count") reports kilobytes in use by Lua.
    local memory_increase = final_memory - initial_memory

    -- Memory increase should be minimal (< 1MB)
    assert.is_true(memory_increase < 1000,
      string.format("Memory increased by %.2fKB, expected < 1000KB", memory_increase))
  end)
end)
describe("index performance", function()
  it("should use indexes effectively", function()
    -- A lookup by primary key should be served from an index.
    local indexed_query = 'FROM documents WHERE id = "test-id"'

    local before = vim.loop.hrtime()
    local result = query_engine.execute_query(indexed_query)
    local indexed_time = (vim.loop.hrtime() - before) / 1000000

    assert.is_true(result.success, "Indexed query should succeed")

    -- Should be very fast with index
    assert.is_true(indexed_time < 50,
      string.format("Indexed query took %.2fms, expected < 50ms", indexed_time))
  end)
end)
end)