Initial vibecoded proof of concept
This commit is contained in:
parent
74812459af
commit
461318a656
61 changed files with 13306 additions and 0 deletions
540
lua/notex/utils/cache.lua
Normal file
540
lua/notex/utils/cache.lua
Normal file
|
@ -0,0 +1,540 @@
|
|||
-- Caching system for performance optimization
local M = {}

-- Cache instances, created by M.init(). Left empty (nil per backend) until
-- init runs so the "cache disabled" guards in the accessor functions can
-- actually fire. Previously each slot held a placeholder table, which is
-- truthy in Lua, so `if not cache_storage.memory` never triggered and
-- pre-init calls crashed on a missing :get()/:set().
local cache_storage = {}

-- Default configuration; deep-merged with the user's table in M.init().
local cache_config = {
  memory = {
    max_size = 1000,
    enabled = true
  },
  lru = {
    max_size = 500,
    enabled = true
  },
  timed = {
    default_ttl = 300,     -- seconds (5 minutes)
    cleanup_interval = 60, -- seconds (1 minute)
    enabled = true
  }
}

-- Performance counters shared by all cache backends.
local cache_metrics = {
  hits = 0,
  misses = 0,
  sets = 0,
  evictions = 0,
  cleanups = 0
}
|
||||
|
||||
-- Simple bounded memory cache. Eviction removes an arbitrary entry
-- (next() order is unspecified).
local MemoryCache = {}
MemoryCache.__index = MemoryCache

--- Create a new memory cache.
-- @tparam number|nil max_size maximum number of entries (default 1000)
function MemoryCache.new(max_size)
  local self = setmetatable({}, MemoryCache)
  self.data = {}
  self.max_size = max_size or 1000
  self.current_size = 0
  return self
end

--- Store a value under key.
-- Fix: a nil value is now treated as a deletion. Previously
-- `set(key, nil)` incremented current_size and then stored nil (which
-- deletes the slot in a Lua table), permanently inflating the counter.
function MemoryCache:set(key, value)
  local existing = self.data[key]

  if value == nil then
    if existing ~= nil then
      self.data[key] = nil
      self.current_size = self.current_size - 1
    end
    cache_metrics.sets = cache_metrics.sets + 1
    return
  end

  if existing == nil then
    self.current_size = self.current_size + 1
  end

  self.data[key] = value

  -- Evict if over size limit
  if self.current_size > self.max_size then
    self:evict()
  end

  cache_metrics.sets = cache_metrics.sets + 1
end

--- Fetch a value; updates hit/miss metrics. Returns nil on a miss.
function MemoryCache:get(key)
  local value = self.data[key]
  if value ~= nil then
    cache_metrics.hits = cache_metrics.hits + 1
    return value
  else
    cache_metrics.misses = cache_metrics.misses + 1
    return nil
  end
end

--- Remove one arbitrary entry (whatever next() yields first).
function MemoryCache:evict()
  local first_key = next(self.data)
  if first_key then
    self.data[first_key] = nil
    self.current_size = self.current_size - 1
    cache_metrics.evictions = cache_metrics.evictions + 1
  end
end

--- Drop all entries and reset the size counter.
function MemoryCache:clear()
  self.data = {}
  self.current_size = 0
end

--- Number of stored entries.
function MemoryCache:size()
  return self.current_size
end
|
||||
|
||||
-- LRU (Least Recently Used) cache. access_order lists keys from least
-- (index 1) to most recently used (last index); eviction pops the head.
local LRUCache = {}
LRUCache.__index = LRUCache

--- Create a new LRU cache holding at most max_size entries (default 500).
function LRUCache.new(max_size)
  local cache = setmetatable({}, LRUCache)
  cache.data = {}
  cache.access_order = {}
  cache.max_size = max_size or 500
  return cache
end

--- Insert or update a value, marking the key most recently used.
function LRUCache:set(key, value)
  if self.data[key] then
    -- Existing entry: overwrite and refresh recency.
    self.data[key] = value
    self:update_access(key)
  else
    -- New entry: append to the recency list, evicting if now too large.
    self.data[key] = value
    self.access_order[#self.access_order + 1] = key
    if #self.access_order > self.max_size then
      self:evict()
    end
  end
  cache_metrics.sets = cache_metrics.sets + 1
end

--- Fetch a value, refreshing its recency; tracks hit/miss metrics.
function LRUCache:get(key)
  local hit = self.data[key]
  if hit == nil then
    cache_metrics.misses = cache_metrics.misses + 1
    return nil
  end
  self:update_access(key)
  cache_metrics.hits = cache_metrics.hits + 1
  return hit
end

--- Move key to the most-recently-used end of access_order.
-- NOTE: linear scan — O(n) per touch.
function LRUCache:update_access(key)
  local order = self.access_order
  for position = 1, #order do
    if order[position] == key then
      table.remove(order, position)
      break
    end
  end
  order[#order + 1] = key
end

--- Drop the least recently used entry (head of access_order).
function LRUCache:evict()
  if #self.access_order == 0 then
    return
  end
  local victim = table.remove(self.access_order, 1)
  self.data[victim] = nil
  cache_metrics.evictions = cache_metrics.evictions + 1
end

--- Remove everything.
function LRUCache:clear()
  self.data = {}
  self.access_order = {}
end

--- Current entry count.
function LRUCache:size()
  return #self.access_order
end
|
||||
|
||||
-- Timed cache: each entry carries an absolute expiry time (os.time
-- seconds); a libuv timer periodically sweeps expired entries, and get()
-- also removes them lazily.
local TimedCache = {}
TimedCache.__index = TimedCache

--- Create a timed cache and start its periodic sweep timer.
-- @tparam number|nil default_ttl entry lifetime in seconds (default 300)
function TimedCache.new(default_ttl)
  local self = setmetatable({}, TimedCache)
  self.data = {}
  self.default_ttl = default_ttl or 300
  self.cleanup_timer = nil
  self:start_cleanup_timer()
  return self
end

--- Store a value with an optional per-entry TTL (seconds).
function TimedCache:set(key, value, ttl)
  ttl = ttl or self.default_ttl
  self.data[key] = {
    value = value,
    expire_time = os.time() + ttl
  }
  cache_metrics.sets = cache_metrics.sets + 1
end

--- Fetch a value; an expired entry is removed and counts as a miss.
function TimedCache:get(key)
  local item = self.data[key]
  if item then
    if os.time() < item.expire_time then
      cache_metrics.hits = cache_metrics.hits + 1
      return item.value
    end
    -- Expired, remove it
    self.data[key] = nil
  end
  cache_metrics.misses = cache_metrics.misses + 1
  return nil
end

--- Remove all expired entries; returns how many were removed.
function TimedCache:cleanup()
  local now = os.time()
  local cleaned = 0
  for key, item in pairs(self.data) do
    if now >= item.expire_time then
      self.data[key] = nil
      cleaned = cleaned + 1
    end
  end
  cache_metrics.cleanups = cache_metrics.cleanups + 1
  return cleaned
end

--- Start the periodic sweep timer (idempotent).
-- NOTE: the timer callback closes over self, so this cache stays alive
-- until stop_cleanup_timer() is called.
function TimedCache:start_cleanup_timer()
  if self.cleanup_timer then
    return
  end

  self.cleanup_timer = vim.loop.new_timer()
  if self.cleanup_timer then
    self.cleanup_timer:start(
      cache_config.timed.cleanup_interval * 1000,
      cache_config.timed.cleanup_interval * 1000,
      vim.schedule_wrap(function()
        self:cleanup()
      end)
    )
  end
end

--- Stop and dispose of the sweep timer.
-- Fix: explicitly stop the timer before closing the handle (uv_timer_stop
-- then uv_close) instead of closing a still-armed timer.
function TimedCache:stop_cleanup_timer()
  if self.cleanup_timer then
    self.cleanup_timer:stop()
    self.cleanup_timer:close()
    self.cleanup_timer = nil
  end
end

--- Drop all entries (the sweep timer keeps running).
function TimedCache:clear()
  self.data = {}
end

--- Entry count, including expired-but-not-yet-swept entries.
function TimedCache:size()
  local count = 0
  for _ in pairs(self.data) do
    count = count + 1
  end
  return count
end
|
||||
|
||||
--- Initialize the cache system.
-- Deep-merges `config` over the defaults and constructs each enabled
-- backend. NOTE(review): calling init twice creates a fresh TimedCache
-- (and sweep timer) without stopping the previous one — confirm init is
-- only called once per session.
-- @tparam table|nil config partial override merged into cache_config
function M.init(config)
  config = config or {}
  cache_config = vim.tbl_deep_extend("force", cache_config, config)

  -- Initialize cache instances (disabled backends are left unset)
  if cache_config.memory.enabled then
    cache_storage.memory = MemoryCache.new(cache_config.memory.max_size)
  end

  if cache_config.lru.enabled then
    cache_storage.lru = LRUCache.new(cache_config.lru.max_size)
  end

  if cache_config.timed.enabled then
    cache_storage.timed = TimedCache.new(cache_config.timed.default_ttl)
  end

  M.info("Cache system initialized", cache_config)
end
|
||||
|
||||
-- Per-backend convenience wrappers. Setters return false plus a reason
-- when the backend is unavailable; getters simply return nil.

--- Store a value in the memory cache.
function M.memory_set(key, value)
  local backend = cache_storage.memory
  if not backend then
    return false, "Memory cache disabled"
  end
  backend:set(key, value)
  return true
end

--- Read a value from the memory cache (nil when disabled or missing).
function M.memory_get(key)
  local backend = cache_storage.memory
  if not backend then
    return nil
  end
  return backend:get(key)
end

--- Store a value in the LRU cache.
function M.lru_set(key, value)
  local backend = cache_storage.lru
  if not backend then
    return false, "LRU cache disabled"
  end
  backend:set(key, value)
  return true
end

--- Read a value from the LRU cache (nil when disabled or missing).
function M.lru_get(key)
  local backend = cache_storage.lru
  if not backend then
    return nil
  end
  return backend:get(key)
end

--- Store a value in the timed cache with an optional TTL in seconds.
function M.timed_set(key, value, ttl)
  local backend = cache_storage.timed
  if not backend then
    return false, "Timed cache disabled"
  end
  backend:set(key, value, ttl)
  return true
end

--- Read a value from the timed cache (nil when disabled, missing or expired).
function M.timed_get(key)
  local backend = cache_storage.timed
  if not backend then
    return nil
  end
  return backend:get(key)
end
|
||||
|
||||
-- Generic operations dispatching on cache_type: "memory" | "lru" | "timed".

--- Store a value in the selected cache (defaults to "memory").
-- @tparam number|nil ttl only used by the timed cache
-- @treturn boolean ok
-- @treturn string|nil error message on failure
function M.set(key, value, cache_type, ttl)
  cache_type = cache_type or "memory"

  if cache_type == "memory" then
    return M.memory_set(key, value)
  end
  if cache_type == "lru" then
    return M.lru_set(key, value)
  end
  if cache_type == "timed" then
    return M.timed_set(key, value, ttl)
  end
  return false, "Unknown cache type: " .. cache_type
end

--- Read a value from the selected cache (defaults to "memory").
-- @return the cached value or nil; nil plus a message for unknown types
function M.get(key, cache_type)
  cache_type = cache_type or "memory"

  if cache_type == "memory" then
    return M.memory_get(key)
  end
  if cache_type == "lru" then
    return M.lru_get(key)
  end
  if cache_type == "timed" then
    return M.timed_get(key)
  end
  return nil, "Unknown cache type: " .. cache_type
end
|
||||
|
||||
--- Read-through helper: return the cached value, computing and caching
-- it on a miss.
-- Fixes: a compute_func that yields nil is no longer written to the
-- cache (storing nil corrupted the memory cache's size accounting), and
-- a non-string pcall error is stringified before being re-raised instead
-- of crashing the concatenation.
-- @tparam string key cache key
-- @tparam function compute_func produces the value on a miss
-- @tparam string|nil cache_type "memory" (default) | "lru" | "timed"
-- @tparam number|nil ttl TTL in seconds (timed cache only)
-- @return the cached or freshly computed value (nil if compute_func
--   returned nil); raises on compute failure
function M.get_or_set(key, compute_func, cache_type, ttl)
  local value = M.get(key, cache_type)
  if value ~= nil then
    return value
  end

  -- Compute value
  local success, result = pcall(compute_func)
  if not success then
    error("Failed to compute cached value: " .. tostring(result))
  end

  -- Only cache real values; a nil result stays uncached and will be
  -- recomputed on the next call.
  if result ~= nil then
    M.set(key, result, cache_type, ttl)
  end

  return result
end
|
||||
|
||||
--- Look a key up in several caches, returning the first hit.
-- @tparam string key
-- @tparam table|nil cache_types ordered list; default {"memory","lru","timed"}
-- @return the value and the cache type it came from, or nil on a miss
function M.multi_get(key, cache_types)
  local order = cache_types or {"memory", "lru", "timed"}

  for index = 1, #order do
    local backend_name = order[index]
    local found = M.get(key, backend_name)
    if found ~= nil then
      return found, backend_name
    end
  end

  return nil
end
|
||||
|
||||
--- Remove a key from one cache, or from every cache when cache_type is nil.
-- Fix: invalidating a memory-cache entry now decrements current_size.
-- The previous raw table delete left the counter inflated, eventually
-- causing the memory cache to evict on every set.
-- @tparam string key
-- @tparam string|nil cache_type "memory" | "lru" | "timed" | nil (= all)
function M.invalidate(key, cache_type)
  if cache_type then
    -- Invalidate specific cache type
    if cache_type == "memory" and cache_storage.memory then
      if cache_storage.memory.data[key] ~= nil then
        cache_storage.memory.data[key] = nil
        cache_storage.memory.current_size = cache_storage.memory.current_size - 1
      end
    elseif cache_type == "lru" and cache_storage.lru then
      cache_storage.lru.data[key] = nil
      -- Keep the recency list consistent with the data table.
      for i, k in ipairs(cache_storage.lru.access_order) do
        if k == key then
          table.remove(cache_storage.lru.access_order, i)
          break
        end
      end
    elseif cache_type == "timed" and cache_storage.timed then
      cache_storage.timed.data[key] = nil
    end
  else
    -- Invalidate from all caches
    M.invalidate(key, "memory")
    M.invalidate(key, "lru")
    M.invalidate(key, "timed")
  end
end
|
||||
|
||||
--- Empty every available cache backend and reset all metrics to zero.
function M.clear_all()
  for _, name in ipairs({ "memory", "lru", "timed" }) do
    local backend = cache_storage[name]
    if backend then
      backend:clear()
    end
  end

  -- Reset metrics
  cache_metrics.hits, cache_metrics.misses = 0, 0
  cache_metrics.sets, cache_metrics.evictions, cache_metrics.cleanups = 0, 0, 0

  M.info("All caches cleared")
end
|
||||
|
||||
--- Snapshot of cache metrics, per-backend sizes, and active config.
-- Metrics and config are deep-copied so callers cannot mutate internal
-- state; hit_ratio is added to the copied metrics (0 when no requests
-- have been made yet).
-- @treturn table { metrics = {...}, sizes = {...}, config = {...} }
function M.get_stats()
  local stats = {
    metrics = vim.deepcopy(cache_metrics),
    sizes = {},
    config = vim.deepcopy(cache_config)
  }

  -- Calculate hit ratio, guarding against division by zero
  local total_requests = cache_metrics.hits + cache_metrics.misses
  stats.metrics.hit_ratio = total_requests > 0 and (cache_metrics.hits / total_requests) or 0

  -- Get cache sizes (only for backends that exist)
  if cache_storage.memory then
    stats.sizes.memory = cache_storage.memory:size()
  end

  if cache_storage.lru then
    stats.sizes.lru = cache_storage.lru:size()
  end

  if cache_storage.timed then
    stats.sizes.timed = cache_storage.timed:size()
  end

  return stats
end
|
||||
|
||||
-- Cache warming functions

--- Pre-populate the LRU cache with query results.
-- Executes each query not already cached and stores successful results
-- under "query:<query>".
-- @tparam table queries list of query strings
-- @treturn boolean ok
-- @treturn string|nil error message when the LRU cache is unavailable
function M.warm_query_cache(queries)
  if not cache_storage.lru then
    return false, "LRU cache not available"
  end

  -- Required lazily to avoid a load-time cycle with the query module.
  local query_engine = require('notex.query')
  local warmed = 0

  for _, query in ipairs(queries) do
    local key = "query:" .. query
    local cached = M.lru_get(key)
    if not cached then
      -- Execute query and cache result; failures are silently skipped.
      local result = query_engine.execute_query(query)
      if result.success then
        M.lru_set(key, result)
        warmed = warmed + 1
      end
    end
  end

  M.info("Warmed query cache", {queries_warmed = warmed})
  return true
end

--- Pre-populate the memory cache with document details.
-- Looks up each path not already cached under "document:<path>".
-- @tparam table document_paths list of document file paths
-- @treturn boolean ok
-- @treturn string|nil error message when the memory cache is unavailable
function M.warm_document_cache(document_paths)
  if not cache_storage.memory then
    return false, "Memory cache not available"
  end

  -- Required lazily to avoid a load-time cycle with the index module.
  local indexer = require('notex.index')
  local warmed = 0

  for _, path in ipairs(document_paths) do
    local key = "document:" .. path
    local cached = M.memory_get(key)
    if not cached then
      -- Get document details and cache; lookup errors are ignored here.
      local details, err = indexer.get_document_details_by_path(path)
      if details then
        M.memory_set(key, details)
        warmed = warmed + 1
      end
    end
  end

  M.info("Warmed document cache", {documents_warmed = warmed})
  return true
end
|
||||
|
||||
--- Shut down the cache system: stop the timed cache's sweep timer, then
-- clear every backend (which also resets metrics).
function M.cleanup()
  if cache_storage.timed then
    cache_storage.timed:stop_cleanup_timer()
  end

  M.clear_all()
  M.info("Cache system cleaned up")
end

-- Export live references for monitoring.
-- NOTE: these are the module's internal tables, not copies — callers
-- should treat them as read-only (use M.get_stats() for safe snapshots).
M.metrics = cache_metrics
M.config = cache_config

--- Log an info message via notex.utils.logging when available.
-- The lazy pcall-require breaks a circular dependency between this
-- module and the logging module; vim.notify is the fallback sink.
function M.info(message, context)
  local ok, logging = pcall(require, 'notex.utils.logging')
  if ok then
    logging.info(message, context)
  else
    -- context is dropped in the fallback path
    vim.notify("Cache: " .. message, vim.log.levels.INFO)
  end
end

return M
|
398
lua/notex/utils/date.lua
Normal file
398
lua/notex/utils/date.lua
Normal file
|
@ -0,0 +1,398 @@
|
|||
-- Date parsing and formatting utilities
local M = {}

-- Supported date shapes. NOTE: these are Lua patterns, not regexes.
local DATE_PATTERNS = {
  ISO_8601 = "^%d%d%d%d%-%d%d%-%d%d$",                                       -- YYYY-MM-DD
  ISO_8601_TIME = "^%d%d%d%d%-%d%d%-%d%dT%d%d:%d%d:%d%dZ?$",                 -- YYYY-MM-DDTHH:MM:SS[Z]
  ISO_8601_OFFSET = "^%d%d%d%d%-%d%d%-%d%dT%d%d:%d%d:%d%d[%+%-]%d%d:%d%d$",  -- ...±HH:MM
  RELATIVE = "^(%d+)([hdwmy])$",                                             -- e.g. "2h", "7d", "3m"
  -- NOTE(review): NATURAL is the same shape as ISO_8601, so in
  -- M.parse_date it shadows the plain ISO branch.
  NATURAL = "^(%d%d%d%d)%-(%d%d)%-(%d%d)$"
}

-- English month names, index 1..12 (used by get_month_number/name).
local MONTH_NAMES = {
  "January", "February", "March", "April", "May", "June",
  "July", "August", "September", "October", "November", "December"
}

-- Three-letter abbreviations, parallel to MONTH_NAMES.
local MONTH_SHORT = {
  "Jan", "Feb", "Mar", "Apr", "May", "Jun",
  "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"
}
|
||||
|
||||
--- Parse a date string into epoch seconds.
-- Dispatches on shape, most specific first: relative ("2d"), ISO 8601
-- datetime, ISO 8601 with offset, "natural" dates, plain ISO dates, and
-- finally a few common slash/dash formats.
-- @tparam string|nil date_string
-- @treturn number|nil epoch seconds, or nil for empty/unrecognized input
function M.parse_date(date_string)
  if not date_string or date_string == "" then
    return nil
  end

  -- Handle relative dates
  if date_string:match(DATE_PATTERNS.RELATIVE) then
    return M.parse_relative_date(date_string)
  end

  -- Handle ISO 8601 with time
  if date_string:match(DATE_PATTERNS.ISO_8601_TIME) then
    return M.parse_iso8601_datetime(date_string)
  end

  -- Handle ISO 8601 with offset
  if date_string:match(DATE_PATTERNS.ISO_8601_OFFSET) then
    return M.parse_iso8601_with_offset(date_string)
  end

  -- Handle natural language dates
  -- NOTE(review): NATURAL matches the same "YYYY-MM-DD" shape as
  -- ISO_8601, so this branch also absorbs plain ISO dates and the
  -- ISO_8601 branch below is effectively unreachable.
  if date_string:match(DATE_PATTERNS.NATURAL) then
    return M.parse_natural_date(date_string)
  end

  -- Handle ISO 8601 date only
  if date_string:match(DATE_PATTERNS.ISO_8601) then
    return M.parse_iso8601_date(date_string)
  end

  -- Handle common formats
  return M.parse_common_formats(date_string)
end
|
||||
|
||||
--- Parse a "YYYY-MM-DD" string into a local-midnight timestamp.
-- @treturn number|nil epoch seconds, or nil when the shape doesn't match
function M.parse_iso8601_date(date_string)
  local y, m, d = date_string:match("^(%d%d%d%d)%-(%d%d)%-(%d%d)$")
  if y == nil then
    return nil
  end

  return os.time({
    year = tonumber(y),
    month = tonumber(m),
    day = tonumber(d),
    hour = 0,
    min = 0,
    sec = 0
  })
end
|
||||
|
||||
--- Parse "YYYY-MM-DDTHH:MM:SS" (local time) or "...Z" (UTC) into epoch
-- seconds.
-- Fix: a trailing "Z" now actually yields UTC. Previously the captured
-- "Z" was ignored and the components were always interpreted as local
-- time by os.time.
-- @treturn number|nil epoch seconds, or nil when the shape doesn't match
function M.parse_iso8601_datetime(date_string)
  local year, month, day, hour, min, sec, zulu =
    date_string:match("^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)(Z?)$")
  if not year then
    return nil
  end

  -- os.time interprets the table in the machine's local timezone.
  local timestamp = os.time({
    year = tonumber(year),
    month = tonumber(month),
    day = tonumber(day),
    hour = tonumber(hour),
    min = tonumber(min),
    sec = tonumber(sec)
  })

  if zulu == "Z" and timestamp then
    -- The components were UTC: add the local-minus-UTC offset so the
    -- result is the true epoch. Offset is computed at the current
    -- moment, so it can be an hour off across a DST boundary.
    local now = os.time()
    local utc_as_local = os.time(os.date("!*t", now))
    timestamp = timestamp + os.difftime(now, utc_as_local)
  end

  return timestamp
end
|
||||
|
||||
--- Parse "YYYY-MM-DDTHH:MM:SS±HH:MM" into epoch seconds (UTC).
-- Fix: the wall-clock components are now anchored to UTC before the
-- declared numeric offset is removed. Previously os.time interpreted
-- them in the machine's local timezone, so results were only correct on
-- machines running in UTC.
-- @treturn number|nil epoch seconds, or nil when the shape doesn't match
function M.parse_iso8601_with_offset(date_string)
  local year, month, day, hour, min, sec, offset_sign, offset_hour, offset_min =
    date_string:match("^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
  if not year then
    return nil
  end

  -- Local-time interpretation of the components...
  local timestamp = os.time({
    year = tonumber(year),
    month = tonumber(month),
    day = tonumber(day),
    hour = tonumber(hour),
    min = tonumber(min),
    sec = tonumber(sec)
  })
  if not timestamp then
    return nil
  end

  -- ...corrected to a UTC interpretation by adding the local-minus-UTC
  -- offset (computed now; may be an hour off across a DST change).
  local now = os.time()
  timestamp = timestamp + os.difftime(now, os.time(os.date("!*t", now)))

  -- Remove the declared timezone offset to reach the UTC epoch.
  local offset_total = tonumber(offset_hour) * 3600 + tonumber(offset_min) * 60
  if offset_sign == "-" then
    offset_total = -offset_total
  end

  return timestamp - offset_total
end
|
||||
|
||||
--- Parse a "natural" date.
-- Accepts "YYYY-MM-DD" directly, or "<MonthName> <day> <year>" (e.g.
-- "January 15 2024", full or 3-letter month names).
-- Fix: the month-name branch is now reachable. Previously an early
-- `return nil` after the numeric match made it dead code.
-- NOTE(review): via M.parse_date only the numeric shape reaches this
-- function; the name form only works on direct calls.
-- @treturn number|nil local-midnight epoch seconds, or nil
function M.parse_natural_date(date_string)
  local year, month, day = date_string:match("^(%d%d%d%d)%-(%d%d)%-(%d%d)$")

  -- Handle natural language month names when the numeric form failed.
  if not year then
    local month_name, day_part, year_part = date_string:match("^(%a+)%s+(%d+)%s+(%d+)$")
    if month_name and day_part and year_part then
      month = M.get_month_number(month_name) -- may be nil for bad names
      day = tonumber(day_part)
      year = tonumber(year_part)
    end
  end

  if year and month and day then
    return os.time({
      year = tonumber(year),
      month = tonumber(month),
      day = tonumber(day),
      hour = 0,
      min = 0,
      sec = 0
    })
  end

  return nil
end
|
||||
|
||||
--- Parse a relative date like "2h", "7d", "2w", "3m", "1y" into a
-- timestamp that far in the PAST from now.
-- Fixes: "m" now means months — matching the [hdwmy] pattern and the
-- original's unreachable months branch — instead of minutes, and the
-- dead "s" branch plus the duplicate "m" branch are removed (the
-- pattern could never produce "s", and the second "m" never ran).
-- @treturn number|nil epoch seconds, or nil for a non-matching string
function M.parse_relative_date(date_string)
  local amount, unit = date_string:match("^(%d+)([hdwmy])$")
  if not amount or not unit then
    return nil
  end

  amount = tonumber(amount)

  -- Seconds per unit; months/years use the module's fixed
  -- approximations (30-day months, 365-day years).
  local unit_seconds = {
    h = 3600,
    d = 86400,
    w = 604800,
    m = 2592000,  -- 30 days
    y = 31536000, -- 365 days
  }

  return os.time() - amount * unit_seconds[unit]
end
|
||||
|
||||
--- Parse a handful of common slash/dash date formats.
-- Fixes: "YYYY/MM/DD" strings are now parsed year-first (they were
-- previously fed through MM-DD-YYYY field extraction, producing
-- nonsense), the pattern comments match the patterns, and a nil result
-- from os.time no longer crashes the numeric comparison.
-- @treturn number|nil local-midnight epoch seconds, or nil
function M.parse_common_formats(date_string)
  local y, m, d

  -- YYYY/MM/DD
  y, m, d = date_string:match("^(%d%d%d%d)%/(%d%d)%/(%d%d)$")

  -- MM/DD/YYYY
  if not y then
    m, d, y = date_string:match("^(%d%d)%/(%d%d)%/(%d%d%d%d)$")
  end

  -- MM-DD-YYYY
  if not y then
    m, d, y = date_string:match("^(%d%d)%-(%d%d)%-(%d%d%d%d)$")
  end

  if not y then
    return nil
  end

  local timestamp = os.time({
    year = tonumber(y),
    month = tonumber(m),
    day = tonumber(d),
    hour = 0,
    min = 0,
    sec = 0
  })

  -- Guard both against a nil return (invalid date) and non-positive
  -- epochs, as the original did.
  if timestamp and timestamp > 0 then
    return timestamp
  end

  return nil
end
|
||||
|
||||
--- Format a timestamp with os.date; returns "" for a nil timestamp.
-- @tparam number|nil timestamp epoch seconds
-- @tparam string|nil format os.date format string, default "%Y-%m-%d"
function M.format_date(timestamp, format)
  if not timestamp then
    return ""
  end
  return os.date(format or "%Y-%m-%d", timestamp)
end

--- Human-readable "N <unit>s ago" description of a past timestamp.
function M.get_relative_time(timestamp)
  local age = os.time() - timestamp

  if age < 60 then
    return "just now"
  end

  -- { seconds-per-unit, upper bound for this unit, label }
  local scales = {
    { 60,      3600,     "minute" },
    { 3600,    86400,    "hour" },
    { 86400,   2592000,  "day" },
    { 2592000, 31536000, "month" },
  }

  for _, scale in ipairs(scales) do
    if age < scale[2] then
      local count = math.floor(age / scale[1])
      return string.format("%d %s%s ago", count, scale[3], count > 1 and "s" or "")
    end
  end

  local years = math.floor(age / 31536000)
  return string.format("%d year%s ago", years, years > 1 and "s" or "")
end
|
||||
|
||||
--- Resolve an English month name (full or 3-letter) to 1..12.
-- Case-insensitive; returns nil for unrecognized names.
function M.get_month_number(month_name)
  local needle = month_name:lower()

  for index = 1, 12 do
    if MONTH_NAMES[index]:lower() == needle
        or MONTH_SHORT[index]:lower() == needle then
      return index
    end
  end

  return nil
end

--- Resolve a month number (1..12) to its English name.
-- @tparam boolean short true for the 3-letter abbreviation
-- @treturn string|nil nil when the number is out of range
function M.get_month_name(month_number, short)
  if month_number < 1 or month_number > 12 then
    return nil
  end

  local source = short and MONTH_SHORT or MONTH_NAMES
  return source[month_number]
end
|
||||
|
||||
--- True when M.parse_date can understand the string.
function M.is_valid_date(date_string)
  if M.parse_date(date_string) == nil then
    return false
  end
  return true
end

--- Shift a timestamp by `amount` units (unit defaults to "days").
-- Months and years use fixed approximations (30-day months, 365-day
-- years). An unknown unit leaves the timestamp unchanged, as before.
function M.add_time(timestamp, amount, unit)
  local seconds_per_unit = {
    seconds = 1,
    minutes = 60,
    hours   = 3600,
    days    = 86400,
    weeks   = 604800,
    months  = 2592000,
    years   = 31536000,
  }

  local factor = seconds_per_unit[unit or "days"] or 0
  return timestamp + amount * factor
end
|
||||
|
||||
--- Parse two date strings and describe the span between them.
-- @treturn table|nil start/end timestamps, formatted dates, and the
-- whole-day duration; nil when either string fails to parse
function M.get_date_range(start_date, end_date)
  local start_timestamp = M.parse_date(start_date)
  local end_timestamp = M.parse_date(end_date)

  if not start_timestamp or not end_timestamp then
    return nil
  end

  return {
    start_timestamp = start_timestamp,
    end_timestamp = end_timestamp,
    start_formatted = M.format_date(start_timestamp),
    end_formatted = M.format_date(end_timestamp),
    -- floored whole days; negative when end precedes start
    duration_days = math.floor((end_timestamp - start_timestamp) / 86400)
  }
end

--- Monday-to-Sunday week containing the timestamp (default: now).
-- NOTE: bounds keep the input's time-of-day — they are exact multiples
-- of 86400 seconds away, not midnight-aligned.
function M.get_week_bounds(timestamp)
  timestamp = timestamp or os.time()

  local date_table = os.date("*t", timestamp)
  local day_of_week = date_table.wday -- Sunday = 1, Monday = 2, etc.

  -- Re-base so Monday = 0, ..., Sunday = 6
  day_of_week = (day_of_week + 5) % 7

  local week_start = timestamp - (day_of_week * 86400)
  local week_end = week_start + (6 * 86400)

  return {
    start_timestamp = week_start,
    end_timestamp = week_end,
    start_formatted = M.format_date(week_start),
    end_formatted = M.format_date(week_end)
  }
end
|
||||
|
||||
--- Calendar-month bounds containing the timestamp (default: now).
-- month_end is the last second of the month (first second of the next
-- month minus one). Passing month + 1 == 13 is fine: os.time normalizes
-- it to January of the following year.
function M.get_month_bounds(timestamp)
  timestamp = timestamp or os.time()
  local date_table = os.date("*t", timestamp)

  -- Midnight on the first of this month (local time)
  local month_start = os.time({
    year = date_table.year,
    month = date_table.month,
    day = 1,
    hour = 0,
    min = 0,
    sec = 0
  })

  -- Midnight on the first of the next month (normalized by os.time)
  local next_month = os.time({
    year = date_table.year,
    month = date_table.month + 1,
    day = 1,
    hour = 0,
    min = 0,
    sec = 0
  })

  local month_end = next_month - 1

  return {
    start_timestamp = month_start,
    end_timestamp = month_end,
    start_formatted = M.format_date(month_start),
    end_formatted = M.format_date(month_end)
  }
end
|
||||
|
||||
--- Static list of commonly used IANA timezone names.
-- A fresh table is returned on every call, as before.
function M.get_timezones()
  local zones = {
    "UTC",
    "America/New_York",
    "America/Chicago",
    "America/Denver",
    "America/Los_Angeles",
    "Europe/London",
    "Europe/Paris",
    "Asia/Tokyo",
    "Australia/Sydney"
  }
  return zones
end

return M
|
402
lua/notex/utils/errors.lua
Normal file
402
lua/notex/utils/errors.lua
Normal file
|
@ -0,0 +1,402 @@
|
|||
-- Centralized error handling and recovery system
local M = {}

local logging = require('notex.utils.logging')

-- Error catalogue. Each entry defines the category used for logging,
-- whether a retry makes sense, which backoff strategy to apply, the
-- retry budget, and the message shown to the user. Unknown error types
-- fall back to UI_ERROR (see M.create_error).
local ERROR_TYPES = {
  DATABASE_CONNECTION = {
    category = "database",
    recoverable = true,
    retry_strategy = "exponential_backoff",
    max_retries = 3,
    user_message = "Database connection error. Retrying..."
  },
  DATABASE_QUERY = {
    category = "database",
    recoverable = false,
    retry_strategy = "none",
    max_retries = 0,
    user_message = "Query execution failed. Please check your query syntax."
  },
  FILE_NOT_FOUND = {
    category = "filesystem",
    recoverable = true,
    retry_strategy = "immediate",
    max_retries = 1,
    user_message = "File not found. It may have been moved or deleted."
  },
  FILE_PARSE_ERROR = {
    category = "parsing",
    recoverable = false,
    retry_strategy = "none",
    max_retries = 0,
    user_message = "Failed to parse file. Please check the file format."
  },
  QUERY_SYNTAX_ERROR = {
    category = "query",
    recoverable = false,
    retry_strategy = "none",
    max_retries = 0,
    user_message = "Query syntax error. Please check your query syntax."
  },
  VALIDATION_ERROR = {
    category = "validation",
    recoverable = false,
    retry_strategy = "none",
    max_retries = 0,
    user_message = "Validation error. Please check your input."
  },
  UI_ERROR = {
    category = "ui",
    recoverable = true,
    retry_strategy = "immediate",
    max_retries = 1,
    user_message = "UI error. Attempting to recover..."
  },
  PERMISSION_ERROR = {
    category = "filesystem",
    recoverable = false,
    retry_strategy = "none",
    max_retries = 0,
    user_message = "Permission denied. Please check file permissions."
  },
  NETWORK_ERROR = {
    category = "network",
    recoverable = true,
    retry_strategy = "exponential_backoff",
    max_retries = 3,
    user_message = "Network error. Retrying..."
  },
  PERFORMANCE_TIMEOUT = {
    category = "performance",
    recoverable = true,
    retry_strategy = "immediate",
    max_retries = 1,
    user_message = "Operation timed out. Retrying with simpler approach..."
  }
}

-- Mutable runtime state:
--   recent_errors          bounded list (last 50) of error objects
--   error_counts           per-type occurrence counters
--   last_recovery_attempt  per-type timestamp of the last retry (throttling)
--   recovery_in_progress   per-type in-flight flag
local error_state = {
  recent_errors = {},
  error_counts = {},
  last_recovery_attempt = {},
  recovery_in_progress = {}
}
|
||||
|
||||
--- Build a standardized error object from the ERROR_TYPES catalogue.
-- Unknown types fall back to UI_ERROR. The object is tracked (counted,
-- logged, appended to the recent-errors list) before being returned.
-- @tparam string error_type key into ERROR_TYPES
-- @tparam string message human-readable description
-- @tparam table|nil context extra data attached to the error
-- @param original_error the underlying error value, if any
function M.create_error(error_type, message, context, original_error)
  local definition = ERROR_TYPES[error_type] or ERROR_TYPES.UI_ERROR

  local error_obj = {
    type = error_type,
    message = message,
    context = context or {},
    original_error = original_error,
    timestamp = os.time(),
    recoverable = definition.recoverable,
    category = definition.category,
    user_message = definition.user_message,
    retry_strategy = definition.retry_strategy,
    max_retries = definition.max_retries,
    error_id = M.generate_error_id()
  }

  M.track_error(error_obj)

  return error_obj
end

--- Produce a loosely unique id of the form "ERR_<epoch>_<4 digits>".
function M.generate_error_id()
  return string.format("ERR_%d_%s", os.time(), math.random(1000, 9999))
end

--- Record an error occurrence: append it to the bounded recent list,
-- bump its per-type counter, and forward it to the logging module.
function M.track_error(error_obj)
  local recent = error_state.recent_errors
  recent[#recent + 1] = error_obj

  -- Keep only the 50 most recent errors.
  while #recent > 50 do
    table.remove(recent, 1)
  end

  local counts = error_state.error_counts
  counts[error_obj.type] = (counts[error_obj.type] or 0) + 1

  logging.handle_error(error_obj.message, error_obj.category, error_obj)
end
|
||||
|
||||
--- Decide whether a failed operation may be retried.
-- @tparam table error_obj object from M.create_error
-- @tparam number current_attempt retries already performed
-- @treturn boolean allowed
-- @treturn string reason
function M.should_retry(error_obj, current_attempt)
  if not error_obj.recoverable then
    return false, "Error is not recoverable"
  end

  if current_attempt >= error_obj.max_retries then
    return false, "Maximum retries exceeded"
  end

  -- Throttle: refuse a retry within 5 seconds of the previous recovery
  -- attempt for the same error type.
  local last_attempt = error_state.last_recovery_attempt[error_obj.type]
  if last_attempt ~= nil and os.time() - last_attempt < 5 then
    return false, "Recovery attempt too recent"
  end

  return true, "Retry allowed"
end
|
||||
|
||||
--- Run `func(...)` under pcall with typed error handling and retries.
-- On success returns (true, result) and clears the retry-throttle state
-- for this error type. On failure builds an error object, consults
-- M.should_retry, applies the type's backoff strategy, and tries again
-- up to max_retries extra attempts; the final failure is surfaced via
-- M.show_final_error and returned as (false, error_obj).
-- @tparam string operation label used in retry log messages
-- @tparam string error_type key into ERROR_TYPES
-- @tparam table|nil context attached to any created error object
-- @tparam function func the operation to run
-- @param ... arguments forwarded to func
-- @treturn boolean success
-- @return result on success, error object (or message) on failure
function M.safe_execute(operation, error_type, context, func, ...)
  local current_attempt = 0
  -- One initial attempt plus the type's retry budget.
  local max_attempts = (ERROR_TYPES[error_type] and ERROR_TYPES[error_type].max_retries or 0) + 1

  while current_attempt < max_attempts do
    local success, result = pcall(func, ...)

    if success then
      -- Reset recovery state on success
      error_state.last_recovery_attempt[error_type] = nil
      error_state.recovery_in_progress[error_type] = nil
      return true, result
    else
      current_attempt = current_attempt + 1
      -- `result` holds the pcall error value here.
      local error_obj = M.create_error(error_type, result, context)

      local should_retry, retry_reason = M.should_retry(error_obj, current_attempt)

      if should_retry and current_attempt < max_attempts then
        error_state.last_recovery_attempt[error_type] = os.time()
        error_state.recovery_in_progress[error_type] = true

        -- Show user message before backing off
        if error_obj.user_message then
          vim.notify(error_obj.user_message, vim.log.levels.WARN)
        end

        -- Apply retry strategy (may sleep, depending on strategy)
        M.apply_retry_strategy(error_obj.retry_strategy, current_attempt)

        logging.info("Retrying operation", {
          operation = operation,
          attempt = current_attempt,
          error_type = error_type,
          reason = retry_reason
        })
      else
        -- Final failure
        error_state.recovery_in_progress[error_type] = nil

        -- Show final error message
        M.show_final_error(error_obj, current_attempt)

        return false, error_obj
      end
    end
  end

  -- Defensive fallback; normally the loop returns before reaching here.
  return false, "Operation failed after all retry attempts"
end
|
||||
|
||||
-- Apply retry strategy
-- Pauses the current (main) thread between retry attempts according to the
-- strategy configured for the error type.
--
-- Bug fix: the original used vim.defer_fn(function() end, ms), which only
-- schedules a no-op callback and returns immediately — retries ran with no
-- delay at all. vim.wait(ms) actually blocks before the next attempt.
-- @param strategy string: "immediate", "exponential_backoff" or "linear_backoff"
-- @param attempt number: 1-based attempt counter
function M.apply_retry_strategy(strategy, attempt)
  if strategy == "immediate" then
    -- No delay
    return
  elseif strategy == "exponential_backoff" then
    local delay_s = math.min(2 ^ attempt, 10) -- Cap at 10 seconds
    vim.wait(delay_s * 1000)
  elseif strategy == "linear_backoff" then
    local delay_ms = attempt * 1000 -- 1 second per attempt
    vim.wait(delay_ms)
  end
end
|
||||
|
||||
-- Show final error to user
-- Notifies the user after retries are exhausted. Validation, query,
-- filesystem and database errors surface at ERROR level; everything else
-- is a WARN.
function M.show_final_error(error_obj, attempt_count)
  local text = error_obj.user_message or error_obj.message
  local message = string.format("%s (%d attempts made)", text, attempt_count)

  -- Categories that warrant an ERROR-level notification
  local severe = {
    validation = true,
    query = true,
    filesystem = true,
    database = true,
  }

  local level = severe[error_obj.category] and vim.log.levels.ERROR or vim.log.levels.WARN
  vim.notify(message, level)
end
|
||||
|
||||
-- Wrap function for safe execution
-- Returns a closure that routes every invocation of `func` through
-- M.safe_execute with the given operation name and error type. The closure
-- therefore yields (ok, result_or_error_obj) rather than func's raw values.
function M.wrap(operation_name, error_type, func)
  local wrapped = function(...)
    -- Fresh context table per call, so error objects never share state
    local context = {operation = operation_name}
    return M.safe_execute(operation_name, error_type, context, func, ...)
  end
  return wrapped
end
|
||||
|
||||
-- Handle specific error types with custom recovery
-- Registry of recovery callbacks, keyed by error type string.
local error_handlers = {}

-- Register (or replace) the recovery handler for an error type.
function M.register_error_handler(error_type, handler)
  error_handlers[error_type] = handler
end

-- Run the registered handler for this error, if any.
-- Returns the handler's result on success, or nil when no handler exists or
-- the handler itself raised; handler failures are logged, never propagated.
function M.handle_specific_error(error_obj)
  local handler = error_handlers[error_obj.type]
  if not handler then
    return nil
  end

  local ok, outcome = pcall(handler, error_obj)
  if ok then
    return outcome
  end

  logging.error("Error handler failed", {
    error_type = error_obj.type,
    handler_error = outcome
  })
  return nil
end
|
||||
|
||||
-- Register default error handlers

-- DATABASE_CONNECTION: try to re-establish the database connection.
M.register_error_handler("DATABASE_CONNECTION", function(error_obj)
  local database = require('notex.database.init')
  local reconnected = database.reconnect()
  if not reconnected then
    return false
  end
  vim.notify("Database connection restored", vim.log.levels.INFO)
  return true
end)

-- FILE_NOT_FOUND: drop documents whose backing file has disappeared.
M.register_error_handler("FILE_NOT_FOUND", function(error_obj)
  local path = error_obj.context and error_obj.context.file_path
  if not path then
    return false
  end
  local indexer = require('notex.index')
  local removed = indexer.remove_document_by_path(path)
  if not removed then
    return false
  end
  vim.notify("Removed missing file from index", vim.log.levels.INFO)
  return true
end)

-- UI_ERROR: reset all UI state to recover from rendering failures.
M.register_error_handler("UI_ERROR", function(error_obj)
  local ui = require('notex.ui')
  ui.cleanup_all()
  vim.notify("UI state reset", vim.log.levels.INFO)
  return true
end)
|
||||
|
||||
-- Get error statistics
-- Returns a snapshot: total error count, per-type counts, the most recent
-- errors, in-progress recoveries, and the error rate over the last hour.
--
-- Fix: the hourly-rate loop variable was named `error`, shadowing Lua's
-- built-in error() inside the loop body; renamed to `err`.
function M.get_error_statistics()
  local stats = {
    total_errors = 0,
    by_type = vim.deepcopy(error_state.error_counts),
    -- NOTE(review): vim.list_slice with a negative start is assumed here to
    -- behave as a tail slice (last 10 errors) — verify against the API.
    recent_errors = vim.list_slice(error_state.recent_errors, -10),
    recovery_in_progress = vim.deepcopy(error_state.recovery_in_progress)
  }

  -- Calculate total across all error types
  for _, count in pairs(error_state.error_counts) do
    stats.total_errors = stats.total_errors + count
  end

  -- Count errors recorded within the last hour
  local one_hour_ago = os.time() - 3600
  local recent_count = 0
  for _, err in ipairs(error_state.recent_errors) do
    if err.timestamp > one_hour_ago then
      recent_count = recent_count + 1
    end
  end
  stats.errors_per_hour = recent_count

  return stats
end
|
||||
|
||||
-- Clear error history
-- Resets every piece of tracked error state back to empty tables. The
-- error_state table itself is kept (fields replaced), so any module-level
-- reference to it stays valid.
function M.clear_error_history()
  for _, field in ipairs({
    "recent_errors",
    "error_counts",
    "last_recovery_attempt",
    "recovery_in_progress",
  }) do
    error_state[field] = {}
  end

  logging.info("Error history cleared")
end
|
||||
|
||||
-- Check system health based on errors
-- Derives a report {status, issues, recommendations} from error statistics:
-- status degrades on a high hourly error rate or stuck recoveries, and
-- becomes "unhealthy" on repeated database connection failures. Many
-- file-not-found errors add an issue without changing the overall status.
function M.check_system_health()
  local stats = M.get_error_statistics()
  local health = {
    status = "healthy",
    issues = {},
    recommendations = {}
  }

  -- Record a problem: sets the status and appends issue/recommendation
  local function flag(status, issue, recommendation)
    health.status = status
    table.insert(health.issues, issue)
    table.insert(health.recommendations, recommendation)
  end

  -- More than 10 errors in the last hour indicates a recurring problem
  if stats.errors_per_hour > 10 then
    flag("degraded",
      "High error rate: " .. stats.errors_per_hour .. " errors/hour",
      "Check system logs for recurring issues")
  end

  -- Count recovery operations that never cleared their in-progress flag
  local stuck_recoveries = 0
  for _, in_progress in pairs(error_state.recovery_in_progress) do
    if in_progress then
      stuck_recoveries = stuck_recoveries + 1
    end
  end

  if stuck_recoveries > 0 then
    flag("degraded",
      stuck_recoveries .. " recovery operations in progress",
      "Consider restarting the plugin")
  end

  -- Specific error patterns
  local db_errors = error_state.error_counts["DATABASE_CONNECTION"] or 0
  if db_errors > 5 then
    flag("unhealthy",
      "Frequent database connection errors",
      "Check database file permissions and disk space")
  end

  local file_errors = error_state.error_counts["FILE_NOT_FOUND"] or 0
  if file_errors > 10 then
    -- Note: does not change the overall status, matching original behavior
    table.insert(health.issues, "Many file not found errors")
    table.insert(health.recommendations, "Consider reindexing the workspace")
  end

  return health
end
|
||||
|
||||
-- Create user-friendly error messages
-- Builds a display string from an error object: base message plus the
-- operation, file name, and error ID when available.
--
-- Robustness fix: errors constructed outside M.create_error may lack a
-- context table or an error_id; both were dereferenced unconditionally and
-- crashed with "attempt to index a nil value" / concat-nil. They are now
-- optional.
function M.format_error_for_user(error_obj)
  local message = error_obj.user_message or error_obj.message
  local context = error_obj.context or {}

  -- Add contextual information
  if context.operation then
    message = message .. " (during: " .. context.operation .. ")"
  end

  if context.file_path then
    message = message .. " (file: " .. vim.fn.fnamemodify(context.file_path, ":t") .. ")"
  end

  -- Add error ID for support, when the error carries one
  if error_obj.error_id then
    message = message .. " [ID: " .. error_obj.error_id .. "]"
  end

  return message
end
|
||||
|
||||
-- Export error types for use in other modules
-- (treated as read-only by convention; mutating this table changes the
-- retry policies used by safe_execute)
M.ERROR_TYPES = ERROR_TYPES

return M
|
213
lua/notex/utils/init.lua
Normal file
213
lua/notex/utils/init.lua
Normal file
|
@ -0,0 +1,213 @@
|
|||
-- Core utility functions
-- Facade module: aggregates the logging, error-handling, type, validation,
-- date and cache utilities behind a single require('notex.utils').
local M = {}

-- Import submodules
local logging = require('notex.utils.logging')
local errors = require('notex.utils.errors')
local types = require('notex.utils.types')
local validation = require('notex.utils.validation')
local date_utils = require('notex.utils.date')
local cache = require('notex.utils.cache')
|
||||
|
||||
-- Generate unique ID
-- Produces an RFC 4122 style version-4 UUID string using math.random.
-- Not cryptographically secure; intended for internal identifiers only.
function M.generate_id()
  local template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'
  local function hex_digit(c)
    -- 'x' is any hex nibble; 'y' is constrained to 8..b per the UUIDv4 layout
    local n = (c == 'x') and math.random(0, 0xf) or math.random(8, 0xb)
    return string.format('%x', n)
  end
  return string.gsub(template, '[xy]', hex_digit)
end
|
||||
|
||||
-- Generate SHA256 hash
-- Returns the lowercase hex SHA-256 digest of `data`, or nil for
-- non-string input.
--
-- Fix: the original shelled out via io.popen("echo '%s' | sha256sum ..."),
-- which (a) appended a trailing newline to the hashed bytes, (b) broke on
-- any input containing a single quote, and (c) allowed shell injection
-- through `data`. Neovim's built-in vim.fn.sha256() hashes the exact bytes
-- with no subprocess.
function M.sha256(data)
  if type(data) ~= "string" then
    return nil
  end
  return vim.fn.sha256(data)
end
|
||||
|
||||
-- Deep merge tables
-- Recursively copies entries from `source` into `target` (mutating it in
-- place). When both sides hold a table for the same key, they are merged;
-- otherwise the source value overwrites the target's. Returns `target`.
function M.deep_merge(target, source)
  for key, incoming in pairs(source) do
    local existing = target[key]
    if type(incoming) == 'table' and type(existing) == 'table' then
      M.deep_merge(existing, incoming)
    else
      target[key] = incoming
    end
  end
  return target
end
|
||||
|
||||
-- Check if file exists
-- True when `path` can be opened for reading. This is an openability check,
-- not a stat, so results for special files may vary by platform.
function M.file_exists(path)
  local handle = io.open(path, "r")
  if not handle then
    return false
  end
  handle:close()
  return true
end
|
||||
|
||||
-- Read file content
-- Returns the file's entire contents as a string, or (nil, message) when
-- the file cannot be opened.
function M.read_file(path)
  local handle = io.open(path, "r")
  if not handle then
    return nil, "Cannot open file: " .. path
  end

  local content = handle:read("*a")
  handle:close()
  return content
end
|
||||
|
||||
-- Write file content
-- Overwrites `path` with `content`. Returns true on success, or
-- (false, message) when the file cannot be opened for writing.
function M.write_file(path, content)
  local handle = io.open(path, "w")
  if not handle then
    return false, "Cannot write to file: " .. path
  end

  handle:write(content)
  handle:close()
  return true
end
|
||||
|
||||
-- Get file modification time
-- Returns the file's mtime as a unix timestamp, or nil when unavailable.
--
-- Fix: the original ran `stat -c %Y <path>` through io.popen, which is
-- GNU-specific (fails on macOS/BSD), spawns a shell per call, and allowed
-- shell injection through `path`. vim.fn.getftime() is portable and safe;
-- it returns -1 when the file cannot be stat'ed.
function M.get_file_mtime(path)
  local mtime = vim.fn.getftime(path)
  if mtime < 0 then
    return nil
  end
  return mtime
end
|
||||
|
||||
-- Validate file encoding (UTF-8 check)
-- Returns true when the file at `path` can be read and its bytes form
-- structurally valid UTF-8 sequences.
--
-- Fix: the original called utf8.len(), but the `utf8` library is Lua 5.3+
-- and does not exist under Neovim's LuaJIT (Lua 5.1), so the call always
-- raised. This version validates the byte stream directly.
-- Note: checks lead/continuation byte structure only; it does not reject
-- overlong encodings or surrogate code points.
function M.is_utf8(path)
  local file = io.open(path, "rb")
  if not file then
    return false
  end

  local content = file:read("*a")
  file:close()

  local i, len = 1, #content
  while i <= len do
    local lead = content:byte(i)
    local trailing
    if lead < 0x80 then
      trailing = 0 -- ASCII
    elseif lead >= 0xC2 and lead <= 0xDF then
      trailing = 1 -- 2-byte sequence
    elseif lead >= 0xE0 and lead <= 0xEF then
      trailing = 2 -- 3-byte sequence
    elseif lead >= 0xF0 and lead <= 0xF4 then
      trailing = 3 -- 4-byte sequence
    else
      return false -- invalid lead byte (0x80-0xC1 or 0xF5-0xFF)
    end

    -- Every continuation byte must lie in 0x80-0xBF
    for offset = 1, trailing do
      local cont = content:byte(i + offset)
      if not cont or cont < 0x80 or cont > 0xBF then
        return false
      end
    end

    i = i + trailing + 1
  end

  return true
end
|
||||
|
||||
-- Format error message
-- Builds a plain error record {error_type, message, timestamp[, context]}.
-- The context field is attached only when a truthy context is supplied.
function M.format_error(error_type, message, context)
  local record = {
    error_type = error_type,
    message = message,
    timestamp = os.time()
  }

  if context then
    record.context = context
  end

  return record
end
|
||||
|
||||
-- Forward logging functions to centralized logging
-- (aliases so callers can write require('notex.utils').info(...) etc.)
M.trace = logging.trace
M.debug = logging.debug
M.info = logging.info
M.warn = logging.warn
M.error = logging.error
M.fatal = logging.fatal
M.log = logging.log
M.timer = logging.timer

-- Forward error handling functions
M.handle_error = errors.handle_error
M.safe_execute = errors.safe_execute
M.wrap = errors.wrap
M.create_error = errors.create_error

-- Forward type utilities
M.detect_type = types.detect_type
M.convert_to_type = types.convert_to_type
M.infer_schema = types.infer_schema

-- Forward validation utilities
M.validate_value = validation.validate_value
M.validate_document_properties = validation.validate_document_properties
M.sanitize_input = validation.sanitize_input

-- Forward date utilities
M.parse_date = date_utils.parse_date
M.format_date = date_utils.format_date
M.get_relative_time = date_utils.get_relative_time

-- Forward cache utilities
M.cache_set = cache.set
M.cache_get = cache.get
M.cache_get_or_set = cache.get_or_set
M.cache_invalidate = cache.invalidate
M.cache_clear_all = cache.clear_all
M.cache_get_stats = cache.get_stats
M.cache_init = cache.init
|
||||
|
||||
-- Simple timer for backward compatibility
-- Returns a closure; calling it logs and returns the milliseconds elapsed
-- since M.simple_timer() was invoked.
function M.simple_timer(name)
  local started = vim.loop.hrtime()

  return function()
    local elapsed_ms = (vim.loop.hrtime() - started) / 1e6
    M.log("INFO", string.format("%s completed in %.2fms", name, elapsed_ms))
    return elapsed_ms
  end
end
|
||||
|
||||
-- Validate data types
-- Returns true when type(value) equals `expected_type` AND the expected
-- type is one of: number, string, boolean, table, function. Any other
-- expected type (including "nil", "thread", "userdata") yields false,
-- matching the original if/elseif chain.
function M.validate_type(value, expected_type)
  local supported = {
    number = true,
    string = true,
    boolean = true,
    table = true,
    ["function"] = true,
  }

  if not supported[expected_type] then
    return false
  end

  return type(value) == expected_type
end
|
||||
|
||||
-- Escape SQL values
-- Renders a Lua value as a SQL literal: strings are single-quoted with
-- embedded quotes doubled, numbers pass through, booleans become 1/0,
-- nil becomes NULL, and any other value is stringified and quoted.
function M.escape_sql(value)
  local kind = type(value)

  if kind == "number" then
    return tostring(value)
  elseif kind == "boolean" then
    return value and "1" or "0"
  elseif kind == "nil" then
    return "NULL"
  end

  -- Strings and everything else: quote, doubling embedded single quotes
  local text = (kind == "string") and value or tostring(value)
  return "'" .. text:gsub("'", "''") .. "'"
end
|
||||
|
||||
return M
|
382
lua/notex/utils/logging.lua
Normal file
382
lua/notex/utils/logging.lua
Normal file
|
@ -0,0 +1,382 @@
|
|||
-- Centralized logging and error handling system
local M = {}

-- Log levels (ascending severity; messages below current_log_level are dropped)
local LOG_LEVELS = {
  TRACE = 1,
  DEBUG = 2,
  INFO = 3,
  WARN = 4,
  ERROR = 5,
  FATAL = 6
}

-- Current log level (can be configured via M.init or M.set_log_level)
local current_log_level = LOG_LEVELS.INFO

-- Log file configuration (overridable through M.init's config argument)
local log_config = {
  file_enabled = true,
  file_path = nil, -- Will be set to stdpath('data')/notex/notex.log
  max_file_size = 1024 * 1024, -- 1MB
  backup_count = 3,
  console_enabled = true
}

-- Error categories for better handling
local ERROR_CATEGORIES = {
  DATABASE = "database",
  PARSING = "parsing",
  QUERY = "query",
  UI = "ui",
  FILESYSTEM = "filesystem",
  VALIDATION = "validation",
  CONFIGURATION = "configuration",
  NETWORK = "network",
  PERFORMANCE = "performance"
}

-- Error context stack for nested operations (see M.push_context/M.pop_context)
local error_context = {}

-- Performance tracking: rolling windows of recent timings plus counters
local performance_metrics = {
  query_times = {},
  index_times = {},
  operation_counts = {},
  error_counts = {}
}
|
||||
|
||||
-- Initialize logging system
-- @param config table|nil: optional overrides — log_level (string name),
--   file_enabled, file_path, max_file_size, backup_count, console_enabled
-- @return boolean, string: always true plus a status message
function M.init(config)
  config = config or {}

  -- Set log level (unknown level names are silently ignored)
  if config.log_level then
    local level = LOG_LEVELS[config.log_level:upper()]
    if level then
      current_log_level = level
    end
  end

  -- Configure logging (config keys override the defaults)
  log_config = vim.tbl_deep_extend("force", log_config, config)

  -- Set default log file path
  if not log_config.file_path then
    log_config.file_path = vim.fn.stdpath('data') .. '/notex/notex.log'
  end

  -- Ensure log directory exists
  local log_dir = vim.fn.fnamemodify(log_config.file_path, ':h')
  vim.fn.mkdir(log_dir, 'p')

  -- Clean up old log files
  M.cleanup_log_files()

  M.log("INFO", "Logging system initialized", {
    log_level = M.get_log_level_name(),
    log_file = log_config.file_path
  })

  return true, "Logging system initialized"
end
|
||||
|
||||
-- Core logging function
-- Formats an entry and dispatches it to the console (via vim.notify;
-- debug/trace go to :messages only) and, when enabled, to the log file.
-- @param level string: case-insensitive level name; unknown names log as INFO
-- @param message string
-- @param context table|nil: extra data, rendered with vim.inspect
function M.log(level, message, context)
  level = level:upper()
  local level_value = LOG_LEVELS[level] or LOG_LEVELS.INFO

  -- Skip if below current log level
  if level_value < current_log_level then
    return
  end

  local timestamp = os.date("%Y-%m-%d %H:%M:%S")
  local context_str = context and " | " .. vim.inspect(context) or ""
  local log_entry = string.format("[%s] %s: %s%s", timestamp, level, message, context_str)

  -- Console output: severity maps to the matching vim.notify level
  if log_config.console_enabled then
    if level_value >= LOG_LEVELS.ERROR then
      vim.notify(message, vim.log.levels.ERROR)
    elseif level_value >= LOG_LEVELS.WARN then
      vim.notify(message, vim.log.levels.WARN)
    elseif level_value >= LOG_LEVELS.INFO then
      vim.notify(message, vim.log.levels.INFO)
    else
      -- Debug/trace go to message history but not notifications
      vim.schedule(function()
        vim.cmd('echomsg "' .. message:gsub('"', '\\"') .. '"')
      end)
    end
  end

  -- File output (the full formatted entry, including context)
  if log_config.file_enabled then
    M.write_to_file(log_entry)
  end
end
|
||||
|
||||
-- Write to log file with rotation
-- Appends one line to the log file, rotating first when the file has grown
-- past max_file_size. Write failures are ignored (best-effort logging).
function M.write_to_file(log_entry)
  -- getfsize() returns -1 for a missing file, which never triggers rotation
  if vim.fn.getfsize(log_config.file_path) > log_config.max_file_size then
    M.rotate_log_file()
  end

  local handle = io.open(log_config.file_path, "a")
  if handle then
    handle:write(log_entry .. "\n")
    handle:close()
  end
end
|
||||
|
||||
-- Rotate log files
-- Shifts notex.log -> notex.log.1 -> ... -> notex.log.N, discarding the
-- oldest backup. Called before appending to an oversized log file.
function M.rotate_log_file()
  local base = log_config.file_path

  -- Remove the oldest backup so the shift below has room
  local oldest = base .. "." .. log_config.backup_count
  if vim.fn.filereadable(oldest) > 0 then
    os.remove(oldest)
  end

  -- Shift remaining backups up by one, starting from the newest-numbered
  for index = log_config.backup_count - 1, 1, -1 do
    local src = base .. "." .. index
    if vim.fn.filereadable(src) > 0 then
      os.rename(src, base .. "." .. (index + 1))
    end
  end

  -- The current log becomes backup .1
  if vim.fn.filereadable(base) > 0 then
    os.rename(base, base .. ".1")
  end
end
|
||||
|
||||
-- Clean up old log files
-- Deletes rotated backups older than 30 days.
function M.cleanup_log_files()
  local max_age_days = 30

  for index = 1, log_config.backup_count do
    local backup = log_config.file_path .. "." .. index
    if vim.fn.filereadable(backup) > 0 then
      local age_days = (os.time() - vim.fn.getftime(backup)) / 86400
      if age_days > max_age_days then
        os.remove(backup)
      end
    end
  end
end
|
||||
|
||||
-- Convenience logging functions
-- M.trace / M.debug / M.info / M.warn simply forward to M.log with the
-- corresponding level name; generated in a loop to keep them in sync.
for _, level_name in ipairs({ "TRACE", "DEBUG", "INFO", "WARN" }) do
  M[level_name:lower()] = function(message, context)
    M.log(level_name, message, context)
  end
end
|
||||
|
||||
-- Log at ERROR level and record the error in the per-category metrics.
function M.error(message, context)
  M.log("ERROR", message, context)

  -- Track error metrics, bucketed by context.category when provided
  local category = (context and context.category) or "unknown"
  local counts = performance_metrics.error_counts
  counts[category] = (counts[category] or 0) + 1
end
|
||||
|
||||
-- Log at FATAL level.
-- Also fires a direct vim.notify at ERROR severity (in addition to any
-- notification M.log emits when console output is enabled), so fatal
-- errors are never silently dropped.
function M.fatal(message, context)
  M.log("FATAL", message, context)
  vim.notify("FATAL: " .. message, vim.log.levels.ERROR)
end
|
||||
|
||||
-- Error handling with context
-- Builds a structured error record (message, category, context, timestamp,
-- stack trace, plus any active nested contexts), logs it at ERROR level,
-- and returns the record.
function M.handle_error(error_msg, category, context)
  local info = {
    message = error_msg,
    category = category or "unknown",
    context = context or {},
    timestamp = os.time(),
    stack_trace = debug.traceback()
  }

  -- Snapshot the context stack so later pushes/pops can't mutate the record
  if #error_context > 0 then
    info.nested_context = vim.deepcopy(error_context)
  end

  M.error("Error in " .. info.category, info)

  return info
end
|
||||
|
||||
-- Push error context (for nested operations)
-- Records the operation name, caller-supplied data and a timestamp on the
-- context stack; pair with M.pop_context() when the operation finishes.
function M.push_context(operation, context)
  local frame = {
    operation = operation,
    context = context or {},
    timestamp = os.time()
  }
  error_context[#error_context + 1] = frame
end
|
||||
|
||||
-- Pop error context
-- Removes and returns the most recently pushed frame (nil when empty).
function M.pop_context()
  local depth = #error_context
  return table.remove(error_context, depth)
end
|
||||
|
||||
-- Execute with error context
-- Runs func(...) with an error-context frame pushed; on failure the error
-- is recorded via M.handle_error and (nil, err) is returned, otherwise all
-- of func's return values are passed through.
--
-- Robustness fix: `context` was dereferenced (context.category) without a
-- nil check even though push_context treats it as optional — a nil context
-- crashed the error path. It now defaults to an empty table.
-- Note: trailing nil return values from func are truncated by the table
-- capture of pcall's results.
function M.with_context(operation, context, func, ...)
  context = context or {}
  M.push_context(operation, context)

  local results = {pcall(func, ...)}
  local success = table.remove(results, 1)

  M.pop_context()

  if not success then
    local error_msg = results[1]
    M.handle_error(error_msg, context.category or "operation", context)
    return nil, error_msg
  end

  return unpack(results)
end
|
||||
|
||||
-- Performance tracking
-- Starts a high-resolution timer; pass the returned handle to M.end_timer().
function M.start_timer(operation)
  local handle = {
    operation = operation,
    start_time = vim.loop.hrtime()
  }
  return handle
end
|
||||
|
||||
-- Stops a timer started with M.start_timer(): records the duration in the
-- matching rolling window (operation names containing "query" or "index"
-- keep their last 100 timings), bumps the operation counter, and returns
-- the elapsed time in milliseconds.
function M.end_timer(timer)
  local duration_ms = (vim.loop.hrtime() - timer.start_time) / 1000000

  -- Pick the rolling window based on the operation name, if any matches
  local window
  if timer.operation:match("query") then
    window = performance_metrics.query_times
  elseif timer.operation:match("index") then
    window = performance_metrics.index_times
  end

  if window then
    window[#window + 1] = duration_ms
    -- Keep only the last 100 measurements
    if #window > 100 then
      table.remove(window, 1)
    end
  end

  -- Track operation counts
  local counts = performance_metrics.operation_counts
  counts[timer.operation] = (counts[timer.operation] or 0) + 1

  M.debug("Operation completed", {
    operation = timer.operation,
    duration_ms = duration_ms
  })

  return duration_ms
end
|
||||
|
||||
-- Timer utility function
-- Returns a closure that, when called, stops the timer and reports the
-- milliseconds elapsed since M.timer() was invoked (recording metrics via
-- M.end_timer). Mirrors the semantics of simple_timer in notex.utils.
--
-- Bug fix: the original called M.end_timer(M.start_timer(operation))
-- inside the closure, starting and stopping the timer in the same instant —
-- every measurement came out as ~0ms regardless of elapsed time. The timer
-- must start when M.timer() is called, not when the closure runs.
function M.timer(operation)
  local handle = M.start_timer(operation)
  return function()
    return M.end_timer(handle)
  end
end
|
||||
|
||||
-- Get performance statistics
-- Returns copies of the operation/error counters plus average query and
-- index timings (the averages are present only when samples exist).
function M.get_performance_stats()
  local stats = {
    operations = vim.deepcopy(performance_metrics.operation_counts),
    errors = vim.deepcopy(performance_metrics.error_counts)
  }

  -- Mean of a non-empty sample list
  local function average(samples)
    local sum = 0
    for _, sample in ipairs(samples) do
      sum = sum + sample
    end
    return sum / #samples
  end

  local queries = performance_metrics.query_times
  if #queries > 0 then
    stats.average_query_time = average(queries)
    stats.query_count = #queries
  end

  local indexes = performance_metrics.index_times
  if #indexes > 0 then
    stats.average_index_time = average(indexes)
    stats.index_count = #indexes
  end

  return stats
end
|
||||
|
||||
-- Get log level name
-- Maps current_log_level back to its symbolic name, or "UNKNOWN" if the
-- numeric value has no entry in LOG_LEVELS.
function M.get_log_level_name()
  for name, value in pairs(LOG_LEVELS) do
    if value == current_log_level then
      return name
    end
  end
  return "UNKNOWN"
end
|
||||
|
||||
-- Set log level
-- Accepts a case-insensitive level name. Returns true on success, or
-- (false, message) for unknown names.
function M.set_log_level(level)
  local level_value = LOG_LEVELS[level:upper()]
  if not level_value then
    return false, "Invalid log level: " .. tostring(level)
  end

  current_log_level = level_value
  M.info("Log level changed to " .. level:upper())
  return true
end
|
||||
|
||||
-- Configuration validation
-- Checks a logging config table. Returns (true, {}) when valid, otherwise
-- (false, list-of-error-strings).
function M.validate_config(config)
  local problems = {}
  local function complain(message)
    problems[#problems + 1] = message
  end

  -- log_level must name a known level
  if config.log_level and not LOG_LEVELS[config.log_level:upper()] then
    complain("Invalid log level: " .. config.log_level)
  end

  -- max_file_size must be a positive number
  if config.max_file_size and (type(config.max_file_size) ~= "number" or config.max_file_size <= 0) then
    complain("max_file_size must be a positive number")
  end

  -- backup_count must be a number of at least 1
  if config.backup_count and (type(config.backup_count) ~= "number" or config.backup_count < 1) then
    complain("backup_count must be a positive number")
  end

  return #problems == 0, problems
end
|
||||
|
||||
-- Export error categories for use in other modules
-- (treated as read-only by convention)
M.ERROR_CATEGORIES = ERROR_CATEGORIES

return M
|
381
lua/notex/utils/types.lua
Normal file
381
lua/notex/utils/types.lua
Normal file
|
@ -0,0 +1,381 @@
|
|||
-- Type detection and conversion utilities
local M = {}

-- Type definitions (Lua patterns, not regexes).
--
-- Bug fix: BOOLEAN_TRUE/BOOLEAN_FALSE originally used regex-style
-- alternation ("^(true|yes|on|enabled|1)$"), but Lua patterns have no '|'
-- operator — those patterns only matched the literal pipe-separated text,
-- so boolean detection never fired. The detection rules below now use one
-- rule per accepted literal; these two entries are kept (narrowed to the
-- canonical words) so existing references to them remain valid patterns.
local TYPE_PATTERNS = {
  BOOLEAN_TRUE = "^true$",
  BOOLEAN_FALSE = "^false$",
  INTEGER = "^%-?%d+$",
  FLOAT = "^%-?%d*%.?%d+$", -- also matches plain integers
  DATE_ISO8601 = "^%d%d%d%d%-%d%d%-%d%d$",
  DATE_ISO8601_TIME = "^%d%d%d%d%-%d%d%-%d%dT%d%d:%d%d:%d%d",
  URL = "^https?://[%w%-%.~%/?:%[#%][%]%@!$&'()*+,;=]*$",
  EMAIL = "^[%w%-%.]+@[%w%-%.]+%.%w+$",
  ARRAY_JSON = "^%[.*%]$",
  OBJECT_JSON = "^{.*}$"
}

-- Ordered type detection rules: the first matching pattern wins, so the
-- most specific patterns come first and the catch-all string rule is last.
local TYPE_DETECTION_RULES = {
  -- Boolean literals (one rule per accepted word, per the original intent
  -- of the broken alternation patterns; includes "1"/"0")
  {pattern = "^true$", type = "boolean", value = true},
  {pattern = "^yes$", type = "boolean", value = true},
  {pattern = "^on$", type = "boolean", value = true},
  {pattern = "^enabled$", type = "boolean", value = true},
  {pattern = "^1$", type = "boolean", value = true},
  {pattern = "^false$", type = "boolean", value = false},
  {pattern = "^no$", type = "boolean", value = false},
  {pattern = "^off$", type = "boolean", value = false},
  {pattern = "^disabled$", type = "boolean", value = false},
  {pattern = "^0$", type = "boolean", value = false},

  -- Dates, URLs, emails and JSON shapes
  {pattern = TYPE_PATTERNS.DATE_ISO8601_TIME, type = "date"},
  {pattern = TYPE_PATTERNS.DATE_ISO8601, type = "date"},
  {pattern = TYPE_PATTERNS.URL, type = "url"},
  {pattern = TYPE_PATTERNS.EMAIL, type = "email"},
  {pattern = TYPE_PATTERNS.ARRAY_JSON, type = "array"},
  {pattern = TYPE_PATTERNS.OBJECT_JSON, type = "object"},

  -- Numeric patterns (FLOAT also matches integers; same resulting type)
  {pattern = TYPE_PATTERNS.FLOAT, type = "number"},
  {pattern = TYPE_PATTERNS.INTEGER, type = "number"},

  -- Default
  {pattern = ".*", type = "string"}
}
|
||||
|
||||
-- Detect type of value
-- Classifies `value` by stringifying it and testing against the ordered
-- TYPE_DETECTION_RULES; the first matching rule wins. For rules that carry
-- a coerced value (booleans), that value is returned as a second result.
function M.detect_type(value)
  if value == nil then
    return "nil"
  end

  -- Matching is done on the string form, so values are classified by how
  -- they print, not by their Lua type.
  local value_str = tostring(value)

  for _, rule in ipairs(TYPE_DETECTION_RULES) do
    if value_str:match(rule.pattern) then
      if rule.value ~= nil then
        return rule.type, rule.value
      else
        return rule.type
      end
    end
  end

  -- Defensive fallback: the catch-all ".*" rule above matches any string,
  -- so this line is not expected to be reached.
  return "string"
end
|
||||
|
||||
-- Convert value to specific type
-- Dispatches to the per-type converter. Returns the value unchanged when it
-- already detects as the target type or when the target type is unknown;
-- nil input stays nil.
function M.convert_to_type(value, target_type)
  if value == nil then
    return nil
  end

  local current_type = M.detect_type(value)
  if current_type == target_type then
    return value
  end

  local value_str = tostring(value)

  if target_type == "boolean" then
    return M.convert_to_boolean(value_str)
  end
  if target_type == "number" then
    return M.convert_to_number(value_str)
  end
  if target_type == "string" then
    return M.convert_to_string(value)
  end
  if target_type == "date" then
    return M.convert_to_date(value_str)
  end
  if target_type == "array" then
    return M.convert_to_array(value_str)
  end
  if target_type == "object" then
    return M.convert_to_object(value_str)
  end

  -- Unknown target type: pass through unchanged
  return value
end
|
||||
|
||||
-- Convert to boolean
-- Maps truthy words (true/yes/on/enabled/1) to true and their counterparts
-- (false/no/off/disabled/0) to false, case-insensitively; anything else
-- yields nil.
--
-- Bug fix: the original matched against "^(true|yes|on|enabled|1)$", but
-- Lua patterns have no '|' alternation — that pattern only matched the
-- literal string "true|yes|on|enabled|1", so every real input returned nil.
-- Explicit lookup tables replace the broken patterns.
function M.convert_to_boolean(value_str)
  local TRUE_WORDS = {
    ["true"] = true, ["yes"] = true, ["on"] = true,
    ["enabled"] = true, ["1"] = true,
  }
  local FALSE_WORDS = {
    ["false"] = true, ["no"] = true, ["off"] = true,
    ["disabled"] = true, ["0"] = true,
  }

  local lower = value_str:lower()
  if TRUE_WORDS[lower] then
    return true
  elseif FALSE_WORDS[lower] then
    return false
  else
    return nil
  end
end
|
||||
|
||||
-- Convert to number
-- Parses value_str with tonumber(); returns nil when it is not numeric.
--
-- Bug fix: the original added a scientific-notation fallback that matched
-- two captures ("^([%d%.]+)e([%+%-]?%d+)$") but stored only the first in a
-- single variable and then indexed it like a table (sci_match[1]) — which
-- yields nil on a string and crashed the subsequent concatenation.
-- tonumber() already parses scientific notation ("1e5", "2.5e-3"), so the
-- broken fallback is removed.
function M.convert_to_number(value_str)
  return tonumber(value_str)
end
|
||||
|
||||
-- Convert to string
-- Stringifies any value: tables are JSON-encoded, booleans become
-- "true"/"false", strings pass through, and everything else (including
-- numbers) goes through tostring().
function M.convert_to_string(value)
  local kind = type(value)

  if kind == "string" then
    return value
  elseif kind == "boolean" then
    return value and "true" or "false"
  elseif kind == "table" then
    return vim.json.encode(value)
  end

  return tostring(value)
end
|
||||
|
||||
-- Convert to date
-- Normalizes a date string to "YYYY-MM-DD" via the project date parser;
-- returns the input unchanged when it cannot be parsed.
function M.convert_to_date(value_str)
  local date_parser = require('notex.utils.date')
  local timestamp = date_parser.parse_date(value_str)
  if not timestamp then
    return value_str
  end
  return os.date("%Y-%m-%d", timestamp)
end
|
||||
|
||||
-- Convert to array
-- JSON-shaped input is decoded; otherwise the string is split on commas
-- (after stripping ALL whitespace, including inside items). A string with
-- no commas yields a one-element array.
function M.convert_to_array(value_str)
  -- Handle JSON array form first
  if value_str:match(TYPE_PATTERNS.ARRAY_JSON) then
    local ok, decoded = pcall(vim.json.decode, value_str)
    if ok and type(decoded) == "table" then
      return decoded
    end
  end

  -- Comma-separated fallback
  local stripped = value_str:gsub("%s", "")
  local items = {}
  for item in stripped:gmatch("[^,]+") do
    items[#items + 1] = item
  end

  if #items > 0 then
    return items
  end
  return {value_str}
end
|
||||
|
||||
-- Convert to object
-- JSON-shaped input is decoded; otherwise "k1=v1,k2=v2" pairs are parsed
-- (after stripping all whitespace). Returns an empty table when nothing
-- parses.
function M.convert_to_object(value_str)
  -- Handle JSON object form first
  if value_str:match(TYPE_PATTERNS.OBJECT_JSON) then
    local ok, decoded = pcall(vim.json.decode, value_str)
    if ok and type(decoded) == "table" then
      return decoded
    end
  end

  -- key=value pair fallback
  local stripped = value_str:gsub("%s", "")
  local result = {}
  for pair in stripped:gmatch("[^,]+") do
    local key, val = pair:match("^([^=]+)=(.*)$")
    if key and val then
      result[key] = val
    end
  end

  if next(result) ~= nil then
    return result
  end
  return {}
end
|
||||
|
||||
-- Check whether converting `value` to `target_type` yields a value whose
-- detected type is actually `target_type`.
function M.validate_conversion(value, target_type)
  local converted = M.convert_to_type(value, target_type)
  return (M.detect_type(converted)) == target_type
end
|
||||
|
||||
-- Build a descriptive record for a value: detected type, converted
-- representation, and every viable alternative conversion.
function M.get_type_info(value)
  local detected_type, converted_value = M.detect_type(value)
  return {
    detected_type = detected_type,
    original_value = value,
    converted_value = converted_value,
    is_valid = true,
    possible_conversions = M.get_possible_conversions(value)
  }
end
|
||||
|
||||
-- List every type this value could be converted to, based on the module's
-- detection rules.
-- @param value any
-- @return table  list of {type, value, is_same_type} records
function M.get_possible_conversions(value)
  local conversions = {}

  -- Hoisted out of the loop: both expressions are loop-invariant (the
  -- original recomputed tostring() and M.detect_type() on every rule).
  local value_str = tostring(value)
  local current_type = (M.detect_type(value))

  for _, rule in ipairs(TYPE_DETECTION_RULES) do
    if value_str:match(rule.pattern) then
      local converted = M.convert_to_type(value, rule.type)
      if converted ~= nil then
        table.insert(conversions, {
          type = rule.type,
          value = converted,
          is_same_type = rule.type == current_type
        })
      end
    end
  end

  return conversions
end
|
||||
|
||||
-- Compare the detected types of two values and report compatibility.
function M.compare_types(value1, value2)
  local first = (M.detect_type(value1))
  local second = (M.detect_type(value2))
  return {
    type1 = first,
    type2 = second,
    same_type = first == second,
    compatible = M.are_types_compatible(first, second)
  }
end
|
||||
|
||||
-- Decide whether two detected type names may be used interchangeably.
-- Identical types are compatible; integer/number mix freely; a string is
-- considered compatible with anything (a string can encode most values).
-- @return boolean
function M.are_types_compatible(type1, type2)
  if type1 == type2 then
    return true
  end

  -- integer <-> number promotion. (The original also tested
  -- number==number here, which is unreachable after the exact-match
  -- check above; that dead condition is removed.)
  if (type1 == "integer" and type2 == "number") or
     (type1 == "number" and type2 == "integer") then
    return true
  end

  if type1 == "string" or type2 == "string" then
    return true
  end

  return false
end
|
||||
|
||||
-- Cast a value to target_type. When the cast is impossible, strict mode
-- raises an error; otherwise the original value is returned untouched.
function M.cast_value(value, target_type, strict)
  local converted = M.convert_to_type(value, target_type)
  if converted ~= nil then
    return converted
  end
  if strict then
    error("Cannot cast value to type: " .. target_type)
  end
  return value
end
|
||||
|
||||
-- Infer a schema from a list of sample values: the dominant detected
-- type, the full type distribution, a confidence ratio, and constraints
-- derived from the samples.
-- @param values table  list of sample values
-- @return table        {detected_type, type_distribution, confidence, sample_size, constraints}
function M.infer_schema(values)
  local type_counts = {}

  for _, value in ipairs(values) do
    local detected_type = (M.detect_type(value))
    type_counts[detected_type] = (type_counts[detected_type] or 0) + 1
  end

  -- Find the most common detected type.
  local most_common_type = nil
  local max_count = 0
  for type_name, count in pairs(type_counts) do
    if count > max_count then
      max_count = count
      most_common_type = type_name
    end
  end

  -- Robustness fix: the original divided by #values unconditionally,
  -- producing NaN (0/0) confidence for an empty sample list.
  local confidence = 0
  if #values > 0 then
    confidence = max_count / #values
  end

  return {
    detected_type = most_common_type,
    type_distribution = type_counts,
    confidence = confidence,
    sample_size = #values,
    constraints = M.infer_constraints(values, most_common_type)
  }
end
|
||||
|
||||
-- Derive type-specific constraints from sample values.
-- Strings: length bounds and a dominant format (email/url/date) when more
-- than half the samples match. Numbers: value bounds and integrality.
-- @param values table           sample values
-- @param detected_type string|nil  dominant type from M.infer_schema
-- @return table  constraints (may be empty for other types)
function M.infer_constraints(values, detected_type)
  local constraints = {}

  if detected_type == "string" then
    local min_length = math.huge
    local max_length = 0
    local email_count, url_count, date_count = 0, 0, 0

    -- Single pass: length bounds and format detection together (the
    -- original walked the sample list twice).
    for _, value in ipairs(values) do
      local value_str = tostring(value)
      min_length = math.min(min_length, #value_str)
      max_length = math.max(max_length, #value_str)

      if value_str:match(TYPE_PATTERNS.EMAIL) then
        email_count = email_count + 1
      elseif value_str:match(TYPE_PATTERNS.URL) then
        url_count = url_count + 1
      elseif value_str:match(TYPE_PATTERNS.DATE_ISO8601) or value_str:match(TYPE_PATTERNS.DATE_ISO8601_TIME) then
        date_count = date_count + 1
      end
    end

    -- Robustness fix: with an empty sample list the original published
    -- min_length = math.huge.
    if min_length == math.huge then
      min_length = 0
    end
    constraints.min_length = min_length
    constraints.max_length = max_length

    -- A format wins when a strict majority of samples match it.
    if email_count > #values / 2 then
      constraints.format = "email"
    elseif url_count > #values / 2 then
      constraints.format = "url"
    elseif date_count > #values / 2 then
      constraints.format = "date"
    end

  elseif detected_type == "number" then
    local min_value = math.huge
    local max_value = -math.huge
    local is_integer = true
    local seen_number = false

    for _, value in ipairs(values) do
      local num = tonumber(value)
      if num then
        seen_number = true
        min_value = math.min(min_value, num)
        max_value = math.max(max_value, num)
        if num ~= math.floor(num) then
          is_integer = false
        end
      end
    end

    -- Robustness fix: do not publish +/-math.huge bounds when no sample
    -- was actually numeric.
    if seen_number then
      constraints.min_value = min_value
      constraints.max_value = max_value
    end
    constraints.is_integer = is_integer
  end

  return constraints
end
|
||||
|
||||
return M
|
472
lua/notex/utils/validation.lua
Normal file
472
lua/notex/utils/validation.lua
Normal file
|
@ -0,0 +1,472 @@
|
|||
-- Data validation utilities
local M = {}

local type_utils = require('notex.utils.types')

-- Validation rules
-- Default rule sets keyed by type name. M.create_schema() deep-extends one
-- of these with caller options, so every field here acts as a default.
-- Fields assigned nil are documentation only: a nil value in a table
-- constructor creates no entry.
local VALIDATION_RULES = {
  string = {
    min_length = 0,        -- minimum accepted length in bytes
    max_length = 1000,     -- maximum accepted length in bytes
    required = false,      -- reject nil values when true
    pattern = nil,         -- optional Lua pattern the string must match
    enum = nil             -- optional list of allowed values
  },
  number = {
    min_value = nil,       -- optional lower bound
    max_value = nil,       -- optional upper bound
    integer = false,       -- require an integral value when true
    required = false
  },
  boolean = {
    required = false
  },
  date = {
    required = false,
    min_date = nil,        -- earliest allowed date (parseable string)
    max_date = nil,        -- latest allowed date (parseable string)
    format = "iso8601"
  },
  email = {
    required = false,
    domain_whitelist = nil -- optional list of allowed domains
  },
  url = {
    required = false,
    schemes = {"http", "https"}, -- allowed URL schemes
    domain_whitelist = nil
  },
  array = {
    min_items = 0,         -- minimum element count
    max_items = 100,       -- maximum element count
    item_type = nil,       -- optional type every element must validate as
    required = false
  },
  object = {
    required_fields = {},  -- field names that must be present
    optional_fields = {},  -- field names that may be present
    field_types = {},      -- per-field schemas, keyed by field name
    strict = false,        -- reject fields not listed in field_types
    required = false
  }
}
|
||||
|
||||
-- Validate a value against a schema table.
-- @param value any     value to check; nil is allowed unless schema.required
-- @param schema table  must carry a `type` field; remaining fields are
--                      forwarded to the type-specific validator
-- @return boolean, string  ok flag plus a human-readable message
function M.validate_value(value, schema)
  if not schema or not schema.type then
    return false, "Schema must specify type"
  end

  -- nil is acceptable for optional fields.
  if value == nil then
    if schema.required == true then
      return false, "Value is required"
    end
    return true, "Value is optional and nil"
  end

  -- Coerce to the declared type when the detected type differs.
  local detected_type = type_utils.detect_type(value)
  if detected_type ~= schema.type then
    local converted = type_utils.convert_to_type(value, schema.type)
    if converted == nil then
      return false, string.format("Expected %s, got %s", schema.type, detected_type)
    end
    value = converted -- Use converted value for further validation
  end

  -- Dispatch to M.validate_<type> when such a validator exists.
  local type_validator = "validate_" .. schema.type
  if M[type_validator] then
    -- Fix: the original named this local `error`, shadowing the global
    -- error() builtin for the rest of the scope.
    local valid, err = M[type_validator](value, schema)
    if not valid then
      return false, err
    end
  end

  return true, "Validation passed"
end
|
||||
|
||||
-- Validate a string against length, pattern and enum constraints.
function M.validate_string(value, schema)
  local str = tostring(value)
  local length = #str

  if schema.min_length and length < schema.min_length then
    return false, string.format("String too short (min %d characters)", schema.min_length)
  end
  if schema.max_length and length > schema.max_length then
    return false, string.format("String too long (max %d characters)", schema.max_length)
  end

  if schema.pattern and not str:match(schema.pattern) then
    return false, "String does not match required pattern"
  end

  if schema.enum then
    local allowed = false
    for _, candidate in ipairs(schema.enum) do
      if tostring(candidate) == str then
        allowed = true
        break
      end
    end
    if not allowed then
      return false, string.format("Value must be one of: %s", vim.inspect(schema.enum))
    end
  end

  return true, "String validation passed"
end
|
||||
|
||||
-- Validate a number against integrality and range constraints.
function M.validate_number(value, schema)
  local num = tonumber(value)
  if num == nil then
    return false, "Value is not a number"
  end

  if schema.integer and math.floor(num) ~= num then
    return false, "Value must be an integer"
  end

  if schema.min_value and num < schema.min_value then
    return false, string.format("Value too small (minimum: %s)", tostring(schema.min_value))
  end
  if schema.max_value and num > schema.max_value then
    return false, string.format("Value too large (maximum: %s)", tostring(schema.max_value))
  end

  return true, "Number validation passed"
end
|
||||
|
||||
-- Validate a boolean; non-boolean input passes if it converts cleanly
-- via the shared boolean conversion helper.
function M.validate_boolean(value, schema)
  if type(value) ~= "boolean" then
    local coerced = type_utils.convert_to_boolean(tostring(value))
    if coerced == nil then
      return false, "Value is not a boolean"
    end
  end
  return true, "Boolean validation passed"
end
|
||||
|
||||
-- Validate a date string and its optional min/max date bounds.
function M.validate_date(value, schema)
  local date_parser = require('notex.utils.date')
  local timestamp = date_parser.parse_date(tostring(value))
  if not timestamp then
    return false, "Invalid date format"
  end

  if schema.min_date then
    local lower = date_parser.parse_date(schema.min_date)
    if lower and timestamp < lower then
      return false, "Date is before minimum allowed date"
    end
  end

  if schema.max_date then
    local upper = date_parser.parse_date(schema.max_date)
    if upper and timestamp > upper then
      return false, "Date is after maximum allowed date"
    end
  end

  return true, "Date validation passed"
end
|
||||
|
||||
-- Validate an email address and its optional domain whitelist.
function M.validate_email(value, schema)
  local email = tostring(value)

  if not email:match("^[^@]+@[^@]+%.[^@]+$") then
    return false, "Invalid email format"
  end

  if schema.domain_whitelist then
    local domain = email:match("@([^@]+)$")
    local permitted = false
    for _, allowed_domain in ipairs(schema.domain_whitelist) do
      if allowed_domain == domain then
        permitted = true
        break
      end
    end
    if not permitted then
      return false, string.format("Domain not in whitelist: %s", domain)
    end
  end

  return true, "Email validation passed"
end
|
||||
|
||||
-- Validate a URL and its optional scheme whitelist.
-- Fix: the original hard-coded the pattern "^https?://" when extracting
-- the scheme, so a schema whose `schemes` list contains anything other
-- than http/https (e.g. "ftp") could never pass. The scheme is now parsed
-- generically and compared against schema.schemes.
-- @return boolean, string
function M.validate_url(value, schema)
  local url = tostring(value)
  -- RFC 3986 scheme: ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
  local scheme = url:match("^(%a[%w%+%-%.]*)://")

  -- No scheme at all: only a failure when the schema demands one.
  if not scheme and schema.schemes and #schema.schemes > 0 then
    return false, string.format("URL must start with: %s", table.concat(schema.schemes, ", "))
  end

  if schema.schemes then
    if not scheme then
      return false, "Invalid URL scheme"
    end

    local found = false
    for _, allowed_scheme in ipairs(schema.schemes) do
      if scheme == allowed_scheme then
        found = true
        break
      end
    end

    if not found then
      return false, string.format("URL scheme not allowed: %s", scheme)
    end
  end

  return true, "URL validation passed"
end
|
||||
|
||||
-- Validate an array (or a string convertible to one) against item-count
-- and per-element type constraints.
function M.validate_array(value, schema)
  local arr = value
  if type(arr) ~= "table" then
    arr = type_utils.convert_to_array(tostring(value))
    if type(arr) ~= "table" then
      return false, "Value is not an array"
    end
  end

  local count = #arr
  if schema.min_items and count < schema.min_items then
    return false, string.format("Array too short (min %d items)", schema.min_items)
  end
  if schema.max_items and count > schema.max_items then
    return false, string.format("Array too long (max %d items)", schema.max_items)
  end

  if schema.item_type then
    local item_schema = { type = schema.item_type, required = false }
    for i, item in ipairs(arr) do
      local valid, err = M.validate_value(item, item_schema)
      if not valid then
        return false, string.format("Array item %d invalid: %s", i, err)
      end
    end
  end

  return true, "Array validation passed"
end
|
||||
|
||||
-- Validate a table's fields against required_fields, field_types and
-- strict-mode constraints.
function M.validate_object(value, schema)
  if type(value) ~= "table" then
    return false, "Value is not an object"
  end

  -- Every required field must be present.
  for _, field in ipairs(schema.required_fields or {}) do
    if value[field] == nil then
      return false, string.format("Required field missing: %s", field)
    end
  end

  -- Validate each present field; unknown fields fail only in strict mode.
  for field in pairs(value) do
    local field_schema = schema.field_types and schema.field_types[field]
    if field_schema then
      local valid, err = M.validate_value(value[field], field_schema)
      if not valid then
        return false, string.format("Field '%s' invalid: %s", field, err)
      end
    elseif schema.strict then
      return false, string.format("Unexpected field '%s' (strict mode)", field)
    end
  end

  return true, "Object validation passed"
end
|
||||
|
||||
-- Validate a document's property map against a schema definition.
-- @param properties table|nil         property name -> value
-- @param schema_definition table      property name -> schema
-- @return boolean, table  ok flag plus {errors = {...}, warnings = {...}}
function M.validate_document_properties(properties, schema_definition)
  local errors = {}
  local warnings = {}

  -- Consistency fix: the original returned a bare list
  -- ({"No properties provided"}) on this branch while every other path
  -- returns {errors=..., warnings=...}; callers reading result.errors
  -- would break here.
  if not properties then
    return false, {errors = {"No properties provided"}, warnings = warnings}
  end

  -- Validate each supplied property; unknown properties only warn.
  for prop_name, prop_value in pairs(properties) do
    local prop_schema = schema_definition[prop_name]
    if prop_schema then
      local valid, err = M.validate_value(prop_value, prop_schema)
      if not valid then
        table.insert(errors, string.format("Property '%s': %s", prop_name, err))
      end
    else
      table.insert(warnings, string.format("Unknown property '%s'", prop_name))
    end
  end

  -- Flag schema-required properties the document did not supply.
  for prop_name, prop_schema in pairs(schema_definition) do
    if prop_schema.required and properties[prop_name] == nil then
      table.insert(errors, string.format("Missing required property: %s", prop_name))
    end
  end

  return #errors == 0, {errors = errors, warnings = warnings}
end
|
||||
|
||||
-- Build a validation schema for a named field by layering caller options
-- over the defaults for options.type (string defaults when the type is
-- unknown).
function M.create_schema(field_name, options)
  options = options or {}
  local defaults = VALIDATION_RULES[options.type] or VALIDATION_RULES.string
  local schema = vim.tbl_deep_extend("force", defaults, options)
  schema.field_name = field_name
  return schema
end
|
||||
|
||||
-- Validate a parameter table against a map of allowed parameter schemas.
-- Collects every problem rather than stopping at the first.
function M.validate_query_params(params, allowed_params)
  local errors = {}

  -- Reject parameters that are not declared at all.
  for param_name in pairs(params) do
    if not allowed_params[param_name] then
      table.insert(errors, string.format("Unknown parameter: %s", param_name))
    end
  end

  -- Check each declared parameter: presence when required, then value.
  for param_name, param_schema in pairs(allowed_params) do
    local supplied = params[param_name]
    if supplied == nil then
      if param_schema.required then
        table.insert(errors, string.format("Required parameter missing: %s", param_name))
      end
    else
      local valid, err = M.validate_value(supplied, param_schema)
      if not valid then
        table.insert(errors, string.format("Parameter '%s': %s", param_name, err))
      end
    end
  end

  return #errors == 0, errors
end
|
||||
|
||||
-- Sanitize untrusted string input: strip risky characters, trim
-- surrounding whitespace, and clamp the length.
-- Fix: Lua strings have no trim() method; the original's
-- `sanitized:trim()` raised "attempt to call method 'trim'" at runtime.
-- @param value any          input (nil/false returns nil)
-- @param options table|nil  {max_length = n} (default 1000)
-- @return string|nil
function M.sanitize_input(value, options)
  options = options or {}
  local max_length = options.max_length or 1000

  if not value then
    return nil
  end

  local sanitized = tostring(value)

  -- Remove characters commonly used in HTML/script injection.
  sanitized = sanitized:gsub("[<>\"'&]", "")

  -- Trim leading and trailing whitespace (gsub's count return is
  -- discarded by the single-variable assignment).
  sanitized = sanitized:gsub("^%s+", ""):gsub("%s+$", "")

  -- Limit length
  if #sanitized > max_length then
    sanitized = sanitized:sub(1, max_length)
  end

  return sanitized
end
|
||||
|
||||
-- Validate a file path: non-empty, free of characters that are unsafe in
-- paths, and without ".." traversal sequences.
function M.validate_file_path(path)
  if not path or path == "" then
    return false, "Empty file path"
  end

  -- Characters disallowed in paths on common filesystems.
  if path:match('[<>:"|?*]') then
    return false, "Invalid characters in file path"
  end

  -- Reject parent-directory escapes.
  if path:match("%.%.") then
    return false, "Directory traversal not allowed"
  end

  return true, "File path is valid"
end
|
||||
|
||||
-- Aggregate a list of validation results into counts plus flattened
-- error and warning lists.
function M.create_validation_summary(results)
  local summary = {
    total = #results,
    valid = 0,
    invalid = 0,
    errors = {},
    warnings = {}
  }

  for _, result in ipairs(results) do
    if result.valid then
      summary.valid = summary.valid + 1
    else
      summary.invalid = summary.invalid + 1
      for _, err in ipairs(result.errors or {}) do
        table.insert(summary.errors, err)
      end
      for _, warning in ipairs(result.warnings or {}) do
        table.insert(summary.warnings, warning)
      end
    end
  end

  return summary
end
|
||||
|
||||
return M
|
Loading…
Add table
Add a link
Reference in a new issue