-- notex.nvim/lua/notex/utils/cache.lua
-- Caching system for performance optimization
local M = {}

-- Cache storage: one slot per backend, populated by M.init().
-- BUGFIX: slots must start as nil (not `{}`) — the accessor guards below
-- (`if not cache_storage.memory then ...`) test truthiness, and a plain
-- empty table is truthy, so calling e.g. M.memory_get() before M.init()
-- crashed attempting to call :get on a table without methods.
local cache_storage = {}
-- Cache configuration
-- Per-backend tunables. M.init() deep-merges a user-supplied table over
-- these defaults (vim.tbl_deep_extend "force").
local cache_config = {
-- Bounded key/value store with arbitrary-order eviction.
memory = {
max_size = 1000,
enabled = true
},
-- Least-recently-used cache; evicts the oldest-accessed entry.
lru = {
max_size = 500,
enabled = true
},
-- TTL cache swept periodically by a background libuv timer.
timed = {
default_ttl = 300, -- 5 minutes
cleanup_interval = 60, -- 1 minute
enabled = true
}
}
-- Performance metrics
-- Module-wide counters shared by every backend instance; reset by
-- M.clear_all() and snapshotted by M.get_stats().
local cache_metrics = {
hits = 0,
misses = 0,
sets = 0,
evictions = 0,
cleanups = 0
}
-- Simple memory cache
local MemoryCache = {}
MemoryCache.__index = MemoryCache

--- Build a bounded key/value cache (default capacity 1000 entries).
function MemoryCache.new(max_size)
  local cache = setmetatable({}, MemoryCache)
  cache.data = {}
  cache.max_size = max_size or 1000
  cache.current_size = 0
  return cache
end

--- Store `value` under `key`, evicting one entry when over capacity.
function MemoryCache:set(key, value)
  local is_new = (self.data[key] == nil)
  if is_new then
    self.current_size = self.current_size + 1
  end
  self.data[key] = value
  if self.current_size > self.max_size then
    self:evict()
  end
  cache_metrics.sets = cache_metrics.sets + 1
end

--- Return the cached value for `key` (nil on miss); records hit/miss metrics.
function MemoryCache:get(key)
  local hit = self.data[key]
  if hit == nil then
    cache_metrics.misses = cache_metrics.misses + 1
    return nil
  end
  cache_metrics.hits = cache_metrics.hits + 1
  return hit
end

--- Drop one entry. Order is arbitrary: whatever next() happens to yield.
function MemoryCache:evict()
  local victim = next(self.data)
  if victim then
    self.data[victim] = nil
    self.current_size = self.current_size - 1
    cache_metrics.evictions = cache_metrics.evictions + 1
  end
end

--- Remove every entry and reset the size counter.
function MemoryCache:clear()
  self.data = {}
  self.current_size = 0
end

--- Number of entries currently stored.
function MemoryCache:size()
  return self.current_size
end
-- LRU (Least Recently Used) cache
local LRUCache = {}
LRUCache.__index = LRUCache

--- Build an LRU cache capped at `max_size` entries (default 500).
-- `access_order` keeps keys oldest-first: index 1 is the eviction candidate.
function LRUCache.new(max_size)
  local cache = setmetatable({}, LRUCache)
  cache.data = {}
  cache.access_order = {}
  cache.max_size = max_size or 500
  return cache
end

--- Insert or refresh `key`; evicts the least-recently-used entry when full.
function LRUCache:set(key, value)
  if self.data[key] then
    -- Existing entry: overwrite and move to most-recently-used position.
    self.data[key] = value
    self:update_access(key)
  else
    -- New entry: append as most recently used, then enforce the cap.
    self.data[key] = value
    self.access_order[#self.access_order + 1] = key
    if #self.access_order > self.max_size then
      self:evict()
    end
  end
  cache_metrics.sets = cache_metrics.sets + 1
end

--- Return the value for `key` (nil on miss), refreshing its recency.
function LRUCache:get(key)
  local found = self.data[key]
  if found == nil then
    cache_metrics.misses = cache_metrics.misses + 1
    return nil
  end
  self:update_access(key)
  cache_metrics.hits = cache_metrics.hits + 1
  return found
end

--- Move `key` to the most-recently-used end of the order list.
function LRUCache:update_access(key)
  local order = self.access_order
  for i = 1, #order do
    if order[i] == key then
      table.remove(order, i)
      break
    end
  end
  order[#order + 1] = key
end

--- Discard the least-recently-used entry (front of the order list).
function LRUCache:evict()
  local order = self.access_order
  if #order > 0 then
    local oldest = table.remove(order, 1)
    self.data[oldest] = nil
    cache_metrics.evictions = cache_metrics.evictions + 1
  end
end

--- Remove all entries.
function LRUCache:clear()
  self.data = {}
  self.access_order = {}
end

--- Number of tracked entries.
function LRUCache:size()
  return #self.access_order
end
-- Timed cache with TTL
local TimedCache = {}
TimedCache.__index = TimedCache

--- Build a TTL cache whose entries expire after a per-entry or default TTL.
-- @param default_ttl number|nil seconds an entry lives when set() gives no
--        ttl (default 300)
-- @param cleanup_interval number|nil seconds between background sweeps;
--        new optional parameter (backward compatible) — defaults to
--        cache_config.timed.cleanup_interval as before
function TimedCache.new(default_ttl, cleanup_interval)
  local self = setmetatable({}, TimedCache)
  self.data = {}
  self.default_ttl = default_ttl or 300
  self.cleanup_interval = cleanup_interval or cache_config.timed.cleanup_interval
  self.cleanup_timer = nil
  self:start_cleanup_timer()
  return self
end

--- Store `value` under `key` for `ttl` seconds (default self.default_ttl).
function TimedCache:set(key, value, ttl)
  ttl = ttl or self.default_ttl
  self.data[key] = {
    value = value,
    expire_time = os.time() + ttl
  }
  cache_metrics.sets = cache_metrics.sets + 1
end

--- Return the live value for `key`, or nil. Expired entries are removed
--- lazily on access and counted as misses.
function TimedCache:get(key)
  local item = self.data[key]
  if item then
    if os.time() < item.expire_time then
      cache_metrics.hits = cache_metrics.hits + 1
      return item.value
    end
    -- Expired, remove it
    self.data[key] = nil
  end
  cache_metrics.misses = cache_metrics.misses + 1
  return nil
end

--- Sweep all expired entries; returns the number removed.
function TimedCache:cleanup()
  local now = os.time()
  local cleaned = 0
  for key, item in pairs(self.data) do
    if now >= item.expire_time then
      -- Safe: clearing existing keys during pairs() traversal is allowed.
      self.data[key] = nil
      cleaned = cleaned + 1
    end
  end
  cache_metrics.cleanups = cache_metrics.cleanups + 1
  return cleaned
end

--- Start the repeating background sweep timer (idempotent).
function TimedCache:start_cleanup_timer()
  if self.cleanup_timer then
    return
  end
  local timer = vim.loop.new_timer()
  if not timer then
    return
  end
  local interval_ms = self.cleanup_interval * 1000
  -- vim.schedule_wrap: libuv timer callbacks run outside the main loop's
  -- API-safe context, so the sweep is deferred onto the main event loop.
  timer:start(interval_ms, interval_ms, vim.schedule_wrap(function()
    self:cleanup()
  end))
  self.cleanup_timer = timer
end

--- Stop and release the sweep timer.
function TimedCache:stop_cleanup_timer()
  if self.cleanup_timer then
    -- BUGFIX: stop the repeating timer before closing the handle so no
    -- further callbacks can fire while the close is pending (uv_timer_stop
    -- then uv_close is the documented shutdown order for active timers).
    self.cleanup_timer:stop()
    self.cleanup_timer:close()
    self.cleanup_timer = nil
  end
end

--- Drop all entries (the sweep timer keeps running).
function TimedCache:clear()
  self.data = {}
end

--- Number of stored entries, including expired ones not yet swept.
function TimedCache:size()
  local count = 0
  for _ in pairs(self.data) do
    count = count + 1
  end
  return count
end
-- Initialize caches
--- Initialize (or re-initialize) the cache backends from `config`,
--- deep-merged over the module defaults.
function M.init(config)
  config = config or {}
  cache_config = vim.tbl_deep_extend("force", cache_config, config)

  -- BUGFIX: a previously created timed cache owns a libuv timer; stop it
  -- before the instance is replaced or dropped, otherwise the timer handle
  -- leaks and its sweep callback keeps firing against the orphaned cache.
  if cache_storage.timed and cache_storage.timed.stop_cleanup_timer then
    cache_storage.timed:stop_cleanup_timer()
  end

  -- BUGFIX: disabled backends are explicitly set to nil so the truthiness
  -- guards in the accessors (M.memory_get etc.) actually report "disabled"
  -- instead of leaving a stale or non-functional table in place.
  cache_storage.memory = cache_config.memory.enabled
    and MemoryCache.new(cache_config.memory.max_size) or nil
  cache_storage.lru = cache_config.lru.enabled
    and LRUCache.new(cache_config.lru.max_size) or nil
  cache_storage.timed = cache_config.timed.enabled
    and TimedCache.new(cache_config.timed.default_ttl) or nil

  M.info("Cache system initialized", cache_config)
end
-- Memory cache operations

--- Store `value` under `key` in the memory cache.
-- @return boolean success, string|nil error message when disabled
function M.memory_set(key, value)
  local backend = cache_storage.memory
  if not backend then
    return false, "Memory cache disabled"
  end
  backend:set(key, value)
  return true
end

--- Fetch `key` from the memory cache; nil when disabled or missing.
function M.memory_get(key)
  local backend = cache_storage.memory
  if backend then
    return backend:get(key)
  end
  return nil
end
-- LRU cache operations

--- Store `value` under `key` in the LRU cache.
-- @return boolean success, string|nil error message when disabled
function M.lru_set(key, value)
  local backend = cache_storage.lru
  if not backend then
    return false, "LRU cache disabled"
  end
  backend:set(key, value)
  return true
end

--- Fetch `key` from the LRU cache; nil when disabled or missing.
function M.lru_get(key)
  local backend = cache_storage.lru
  if backend then
    return backend:get(key)
  end
  return nil
end
-- Timed cache operations

--- Store `value` under `key` in the TTL cache for `ttl` seconds
--- (backend default when ttl is nil).
-- @return boolean success, string|nil error message when disabled
function M.timed_set(key, value, ttl)
  local backend = cache_storage.timed
  if not backend then
    return false, "Timed cache disabled"
  end
  backend:set(key, value, ttl)
  return true
end

--- Fetch `key` from the TTL cache; nil when disabled, missing or expired.
function M.timed_get(key)
  local backend = cache_storage.timed
  if backend then
    return backend:get(key)
  end
  return nil
end
-- Generic cache operations with automatic cache selection

--- Store `value` under `key` in the backend named by `cache_type`
--- ("memory" when nil); `ttl` applies to the timed backend only.
-- @return boolean success, string|nil error
function M.set(key, value, cache_type, ttl)
  cache_type = cache_type or "memory"
  if cache_type == "timed" then
    return M.timed_set(key, value, ttl)
  elseif cache_type == "lru" then
    return M.lru_set(key, value)
  elseif cache_type == "memory" then
    return M.memory_set(key, value)
  end
  return false, "Unknown cache type: " .. cache_type
end
--- Fetch `key` from the backend named by `cache_type` ("memory" when nil).
-- @return any cached value (nil on miss), or nil plus an error string for
--         unknown backend names
function M.get(key, cache_type)
  cache_type = cache_type or "memory"
  if cache_type == "timed" then
    return M.timed_get(key)
  elseif cache_type == "lru" then
    return M.lru_get(key)
  elseif cache_type == "memory" then
    return M.memory_get(key)
  end
  return nil, "Unknown cache type: " .. cache_type
end
-- Get or set pattern (compute if not cached)
--- Return the cached value for `key`, computing and caching it on a miss.
-- @param compute_func function invoked with no arguments on a cache miss
-- Raises when compute_func itself errors.
function M.get_or_set(key, compute_func, cache_type, ttl)
  local value = M.get(key, cache_type)
  if value ~= nil then
    return value
  end

  -- Compute value
  local ok, result = pcall(compute_func)
  if not ok then
    -- BUGFIX: tostring() — pcall can return a non-string error object, and
    -- concatenating that directly would raise a second, confusing error.
    error("Failed to compute cached value: " .. tostring(result))
  end

  -- BUGFIX: never cache a nil result. MemoryCache:set bumps current_size
  -- for a "new" key before storing, so storing nil corrupted the counter
  -- (and a nil value can never be returned as a hit anyway).
  if result ~= nil then
    M.set(key, result, cache_type, ttl)
  end
  return result
end
-- Cache with multiple backends (try each in order)

--- Probe several backends for `key` and return the first hit together with
--- the backend name that served it. Defaults to memory, then lru, then timed.
function M.multi_get(key, cache_types)
  local order = cache_types or {"memory", "lru", "timed"}
  for _, backend in ipairs(order) do
    local value = M.get(key, backend)
    if value ~= nil then
      return value, backend
    end
  end
  return nil
end
-- Invalidate cache entries
--- Remove `key` from the backend named by `cache_type`, or from every
--- backend when `cache_type` is nil.
function M.invalidate(key, cache_type)
  if not cache_type then
    -- Invalidate from all caches
    M.invalidate(key, "memory")
    M.invalidate(key, "lru")
    M.invalidate(key, "timed")
    return
  end

  if cache_type == "memory" and cache_storage.memory then
    local mem = cache_storage.memory
    if mem.data[key] ~= nil then
      mem.data[key] = nil
      -- BUGFIX: the original left current_size unchanged on invalidation,
      -- so the counter drifted above the real entry count and triggered
      -- premature evictions in MemoryCache:set.
      mem.current_size = mem.current_size - 1
    end
  elseif cache_type == "lru" and cache_storage.lru then
    cache_storage.lru.data[key] = nil
    -- Keep the recency list consistent with the data table.
    local order = cache_storage.lru.access_order
    for i = 1, #order do
      if order[i] == key then
        table.remove(order, i)
        break
      end
    end
  elseif cache_type == "timed" and cache_storage.timed then
    cache_storage.timed.data[key] = nil
  end
end
-- Clear all caches
--- Empty every enabled backend and zero all metric counters.
function M.clear_all()
  for _, name in ipairs({"memory", "lru", "timed"}) do
    local backend = cache_storage[name]
    if backend then
      backend:clear()
    end
  end

  -- Reset metrics (mutating existing keys during pairs() is allowed).
  for metric in pairs(cache_metrics) do
    cache_metrics[metric] = 0
  end

  M.info("All caches cleared")
end
-- Get cache statistics
--- Snapshot the current metrics (plus derived hit_ratio), per-backend
--- entry counts, and the active configuration. Returned tables are copies.
function M.get_stats()
  local metrics = vim.deepcopy(cache_metrics)
  local total_requests = metrics.hits + metrics.misses
  metrics.hit_ratio = total_requests > 0 and (metrics.hits / total_requests) or 0

  local sizes = {}
  for _, name in ipairs({"memory", "lru", "timed"}) do
    local backend = cache_storage[name]
    if backend then
      sizes[name] = backend:size()
    end
  end

  return {
    metrics = metrics,
    sizes = sizes,
    config = vim.deepcopy(cache_config)
  }
end
-- Cache warming functions

--- Pre-execute each query in `queries` and store successful results in the
--- LRU cache under "query:<query>" keys. Already-cached queries are skipped.
-- @return boolean success, string|nil error when the LRU backend is off
function M.warm_query_cache(queries)
  if not cache_storage.lru then
    return false, "LRU cache not available"
  end

  local query_engine = require('notex.query')
  local warmed = 0

  for _, query in ipairs(queries) do
    local key = "query:" .. query
    if not M.lru_get(key) then
      -- Execute query and cache result
      local result = query_engine.execute_query(query)
      if result.success then
        M.lru_set(key, result)
        warmed = warmed + 1
      end
    end
  end

  M.info("Warmed query cache", {queries_warmed = warmed})
  return true
end
--- Pre-load document details for each path in `document_paths` into the
--- memory cache under "document:<path>" keys. Cached paths are skipped.
-- @return boolean success, string|nil error when the memory backend is off
function M.warm_document_cache(document_paths)
  if not cache_storage.memory then
    return false, "Memory cache not available"
  end

  local indexer = require('notex.index')
  local warmed = 0

  for _, path in ipairs(document_paths) do
    local key = "document:" .. path
    if not M.memory_get(key) then
      -- Fetch document details and cache only when the lookup succeeded.
      local details = indexer.get_document_details_by_path(path)
      if details then
        M.memory_set(key, details)
        warmed = warmed + 1
      end
    end
  end

  M.info("Warmed document cache", {documents_warmed = warmed})
  return true
end
-- Cleanup function
--- Shut down the cache system: stop the TTL sweep timer and drop all entries.
function M.cleanup()
  local timed = cache_storage.timed
  if timed then
    timed:stop_cleanup_timer()
  end
  M.clear_all()
  M.info("Cache system cleaned up")
end
-- Export cache metrics for monitoring
-- NOTE: these are live references (not copies) to the module-local tables,
-- so external mutation would affect the cache system's own state.
M.metrics = cache_metrics
M.config = cache_config
-- Forward logging functions (circular dependency resolution)

--- Log an info-level message through notex.utils.logging when that module
--- can be required, otherwise fall back to vim.notify (context is dropped
--- in the fallback path, matching the logging-unavailable degradation).
function M.info(message, context)
  local ok, logging = pcall(require, 'notex.utils.logging')
  if ok then
    logging.info(message, context)
    return
  end
  vim.notify("Cache: " .. message, vim.log.levels.INFO)
end

return M