Initial vibecoded proof of concept
parent 74812459af
commit 461318a656
61 changed files with 13306 additions and 0 deletions

lua/notex/query/executor.lua (374 lines, new file)

@@ -0,0 +1,374 @@
-- Query execution engine module
local M = {}

local database = require('notex.database.init')
local query_builder = require('notex.query.builder')
local query_parser = require('notex.query.parser')
local utils = require('notex.utils')

-- Execute parsed query
function M.execute(parsed_query, options)
  options = options or {}
  local start_time = vim.loop.hrtime()

  local result = {
    documents = {},
    total_count = 0,
    execution_time_ms = 0,
    query_hash = "",
    success = false,
    errors = {},
    metadata = {}
  }

  -- Validate parsed query
  if parsed_query.parse_errors and #parsed_query.parse_errors > 0 then
    result.errors = parsed_query.parse_errors
    result.error_type = "parse_error"
    return result
  end

  -- Generate query hash
  result.query_hash = query_parser.generate_query_hash(parsed_query)

  -- Build SQL query
  local sql, params = query_builder.build_sql(parsed_query, options)
  if not sql then
    table.insert(result.errors, "Failed to build SQL query")
    result.error_type = "build_error"
    return result
  end

  -- Validate SQL
  local valid, validation_error = query_builder.validate_sql(sql)
  if not valid then
    table.insert(result.errors, validation_error)
    result.error_type = "validation_error"
    return result
  end

  -- Execute query
  local ok, query_result = database.execute(sql, params)
  if not ok then
    table.insert(result.errors, "Query execution failed: " .. tostring(query_result))
    result.error_type = "execution_error"
    return result
  end

  -- Process results
  local processed_results = M.process_query_results(query_result, parsed_query, options)
  result.documents = processed_results.documents
  result.metadata = processed_results.metadata

  -- Get total count
  result.total_count = M.get_total_count(parsed_query, options)

  -- Calculate execution time
  local end_time = vim.loop.hrtime()
  result.execution_time_ms = (end_time - start_time) / 1e6

  result.success = true

  -- Log slow queries
  if result.execution_time_ms > 100 then
    utils.log("WARN", string.format("Slow query detected: %.2fms", result.execution_time_ms), {
      query_hash = result.query_hash,
      document_count = #result.documents
    })
  end

  return result
end
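
--[[
Usage sketch (hypothetical): the exact shape of `parsed_query` comes from
notex.query.parser; the fields shown here are only the ones this module
reads (parse_errors, filters, conditions, order_by), not a full
specification, and `parser.parse` is an assumed entry point:

  local parser = require('notex.query.parser')
  local executor = require('notex.query.executor')

  local parsed = parser.parse('status = "open"')
  local result = executor.execute(parsed, { highlight = true })
  if result.success then
    print(result.total_count, result.execution_time_ms)
  else
    vim.print(result.errors)
  end
]]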

-- Process query results
function M.process_query_results(raw_results, parsed_query, options)
  local documents = {}
  local metadata = {
    properties_found = {},
    aggregation_results = {}
  }

  -- Group results by document if we have properties
  local documents_by_id = {}
  for _, row in ipairs(raw_results) do
    local doc_id = row.id

    if not documents_by_id[doc_id] then
      documents_by_id[doc_id] = {
        id = doc_id,
        file_path = row.file_path,
        content_hash = row.content_hash,
        last_modified = row.last_modified,
        created_at = row.created_at,
        updated_at = row.updated_at,
        properties = {}
      }
    end

    -- Add properties from result row (everything except the fixed document columns)
    for key, value in pairs(row) do
      if key ~= "id" and key ~= "file_path" and key ~= "content_hash" and
         key ~= "last_modified" and key ~= "created_at" and key ~= "updated_at" then
        if value and value ~= "" then
          documents_by_id[doc_id].properties[key] = value
          metadata.properties_found[key] = true
        end
      end
    end
  end

  -- Convert to array
  for _, doc in pairs(documents_by_id) do
    table.insert(documents, doc)
  end

  -- Apply post-processing filters
  documents = M.apply_post_filters(documents, parsed_query, options)

  -- Apply sorting if not handled by SQL
  if parsed_query.order_by and parsed_query.order_by.field == "relevance" then
    documents = M.sort_by_relevance(documents, parsed_query)
  end

  return {
    documents = documents,
    metadata = metadata
  }
end
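
--[[
Illustration of the grouping above (rows are illustrative; the actual column
set depends on the SQL the builder generates). Two joined rows for the same
document id

  { id = 1, file_path = "notes/a.md", status = "open" }
  { id = 1, file_path = "notes/a.md", priority = "high" }

collapse into one entry

  { id = 1, file_path = "notes/a.md",
    properties = { status = "open", priority = "high" } }

with metadata.properties_found = { status = true, priority = true }.
]]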

-- Apply post-processing filters
function M.apply_post_filters(documents, parsed_query, options)
  local filtered = documents

  -- Apply text search highlighting if requested
  if options.highlight and parsed_query.conditions then
    filtered = M.apply_text_highlighting(filtered, parsed_query)
  end

  -- Apply additional filters that couldn't be handled by SQL
  filtered = M.apply_complex_filters(filtered, parsed_query)

  return filtered
end

-- Apply text highlighting
function M.apply_text_highlighting(documents, parsed_query)
  -- Stub: a full implementation would mark matching text in document properties
  return documents
end
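
--[[
A minimal sketch of what the highlighting pass could look like, assuming
string properties and a single plain-text search term; `highlight_matches`
and `term` are hypothetical (real conditions are trees, not one string),
and the "**" markers are arbitrary:

  local function highlight_matches(documents, term)
    for _, doc in ipairs(documents) do
      for key, value in pairs(doc.properties) do
        if type(value) == "string" and value:find(term, 1, true) then
          -- vim.pesc escapes Lua pattern magic characters in the term
          doc.properties[key] = value:gsub(vim.pesc(term), "**%0**")
        end
      end
    end
    return documents
  end
]]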

-- Apply complex filters
function M.apply_complex_filters(documents, parsed_query)
  local filtered = {}

  for _, doc in ipairs(documents) do
    local include = true

    -- Placeholder for filter logic SQL cannot express; currently every document passes
    if include then
      table.insert(filtered, doc)
    end
  end

  return filtered
end
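
--[[
Example of a filter that cannot be pushed down to SQL and would live in the
loop above, e.g. a Lua-pattern match on a property value. The
`parsed_query.lua_pattern` field is hypothetical and is not produced by the
current parser:

  local pattern = parsed_query.lua_pattern
  if pattern and doc.properties.title then
    include = doc.properties.title:match(pattern) ~= nil
  end
]]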

-- Sort by relevance
function M.sort_by_relevance(documents, parsed_query)
  -- Simple relevance scoring based on filter matches
  local scored = {}

  for _, doc in ipairs(documents) do
    local score = 0

    -- Score one point per filtered field the document actually has
    for field, _ in pairs(parsed_query.filters or {}) do
      if doc.properties[field] then
        score = score + 1
      end
    end

    table.insert(scored, {
      document = doc,
      score = score
    })
  end

  -- Sort by score (descending)
  table.sort(scored, function(a, b) return a.score > b.score end)

  -- Extract documents
  local sorted_documents = {}
  for _, item in ipairs(scored) do
    table.insert(sorted_documents, item.document)
  end

  return sorted_documents
end

-- Get total count for query
function M.get_total_count(parsed_query, options)
  local count_sql, count_params = query_builder.build_count_query(parsed_query, options)
  if not count_sql then
    utils.log("ERROR", "Failed to build count query")
    return 0
  end

  local ok, count_result = database.execute(count_sql, count_params)
  if not ok then
    utils.log("ERROR", "Failed to get total count", { error = count_result })
    return 0
  end

  return count_result[1] and count_result[1].total_count or 0
end

-- Execute query with caching
function M.execute_cached(parsed_query, options)
  options = options or {}
  local cache_enabled = options.cache ~= false

  if not cache_enabled then
    return M.execute(parsed_query, options)
  end

  local query_hash = query_parser.generate_query_hash(parsed_query)
  local cache_key = "query:" .. query_hash

  -- Check cache (implementation would depend on cache system)
  -- For now, just execute directly; cache_key is computed for the future lookup
  return M.execute(parsed_query, options)
end
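
--[[
A minimal in-memory cache sketch for the lookup above, assuming results may
be reused within a single session. The module-local `query_cache` table and
the `options.cache_ttl_ms` knob are hypothetical, and invalidation on
document updates is not handled:

  local query_cache = {}

  local cached = query_cache[cache_key]
  if cached and (vim.loop.hrtime() - cached.at) / 1e6 < (options.cache_ttl_ms or 5000) then
    return cached.result
  end
  local result = M.execute(parsed_query, options)
  if result.success then
    query_cache[cache_key] = { result = result, at = vim.loop.hrtime() }
  end
  return result
]]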

-- Validate query before execution
function M.validate_query(parsed_query)
  local errors = {}

  -- Check for required fields (return early so we never index a nil query)
  if not parsed_query then
    table.insert(errors, "Query must have filters")
    return false, errors
  end
  if not parsed_query.filters then
    table.insert(errors, "Query must have filters")
  end

  -- Validate filter values
  for field, value in pairs(parsed_query.filters or {}) do
    if not M.is_valid_filter_value(value) then
      table.insert(errors, string.format("Invalid filter value for field '%s'", field))
    end
  end

  -- Validate conditions
  if parsed_query.conditions then
    M.validate_conditions_recursive(parsed_query.conditions, errors)
  end

  return #errors == 0, errors
end

-- Check if filter value is valid
function M.is_valid_filter_value(value)
  -- Reject unreasonably long strings
  if type(value) == "string" and #value > 1000 then
    return false
  end

  -- Reject unreasonably large lists
  if type(value) == "table" and #value > 100 then
    return false
  end

  return true
end

-- Validate conditions recursively
function M.validate_conditions_recursive(conditions, errors)
  if conditions.type == "comparison" then
    if not conditions.field or not conditions.operator or conditions.value == nil then
      table.insert(errors, "Invalid comparison condition")
    end
  elseif conditions.type == "existence" then
    if not conditions.field then
      table.insert(errors, "Invalid existence condition")
    end
  elseif conditions.clauses then
    for _, clause in ipairs(conditions.clauses) do
      M.validate_conditions_recursive(clause, errors)
    end
  end
end
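
--[[
Example of the condition shapes this walker accepts: leaf nodes are
"comparison" or "existence" records, and any node carrying a `clauses`
array is recursed into (the `logic` field is illustrative; the walker
itself only looks at `clauses`):

  {
    logic = "AND",
    clauses = {
      { type = "comparison", field = "status", operator = "=", value = "open" },
      { type = "existence",  field = "due_date" },
    },
  }
]]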

-- Get query suggestions
function M.get_suggestions(partial_query, options)
  local suggestions = {
    properties = {},
    values = {},
    operators = {}
  }

  -- Get property suggestions from schema
  local ok, schema_result = database.execute("SELECT DISTINCT property_key FROM schema_metadata ORDER BY document_count DESC LIMIT 20")
  if ok then
    for _, row in ipairs(schema_result) do
      table.insert(suggestions.properties, row.property_key)
    end
  end

  -- Get value suggestions for common properties
  local common_properties = {"status", "priority", "tags", "type"}
  for _, prop in ipairs(common_properties) do
    local values_ok, values_result = database.execute(
      "SELECT DISTINCT value FROM properties WHERE key = ? AND value_type = 'string' LIMIT 10",
      { prop }
    )
    if values_ok then
      suggestions.values[prop] = {}
      for _, row in ipairs(values_result) do
        table.insert(suggestions.values[prop], row.value)
      end
    end
  end

  -- Common operators
  suggestions.operators = {"=", "!=", ">", "<", ">=", "<=", "CONTAINS", "STARTS_WITH", "ENDS_WITH", "INCLUDES"}

  return suggestions
end

-- Explain query execution plan
function M.explain_query(parsed_query, options)
  local sql, params = query_builder.build_sql(parsed_query, options)
  if not sql then
    return {
      success = false,
      error = "Failed to build SQL query"
    }
  end

  local explain_sql = "EXPLAIN QUERY PLAN " .. sql
  local ok, explain_result = database.execute(explain_sql, params)

  if not ok then
    return {
      success = false,
      error = explain_result,
      sql = sql
    }
  end

  return {
    success = true,
    sql = sql,
    params = params,
    plan = explain_result,
    estimated_cost = M.calculate_query_cost(explain_result)
  }
end

-- Calculate query cost
function M.calculate_query_cost(explain_result)
  local total_cost = 0

  for _, row in ipairs(explain_result) do
    -- Simple heuristic over SQLite's EXPLAIN QUERY PLAN output:
    -- full table scans are expensive, index searches are cheap
    if row.detail and row.detail:match("SCAN") then
      total_cost = total_cost + 10
    elseif row.detail and row.detail:match("SEARCH") then
      total_cost = total_cost + 2
    else
      total_cost = total_cost + 1
    end
  end

  return total_cost
end
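
--[[
Usage sketch: inspect the plan and the heuristic cost before running a
potentially expensive query (the threshold of 20 is an arbitrary
illustration, not a calibrated value):

  local plan = M.explain_query(parsed_query, {})
  if plan.success and plan.estimated_cost > 20 then
    utils.log("WARN", "Query may be slow", { sql = plan.sql, cost = plan.estimated_cost })
  end
]]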

return M