Initial vibecoded proof of concept
This commit is contained in:
parent
74812459af
commit
461318a656
61 changed files with 13306 additions and 0 deletions
370
lua/notex/query/builder.lua
Normal file
370
lua/notex/query/builder.lua
Normal file
|
@ -0,0 +1,370 @@
|
|||
-- SQL query builder module
|
||||
local M = {}
|
||||
|
||||
local utils = require('notex.utils')
|
||||
|
||||
-- Build a complete SQL statement and its bind parameters from a parsed query.
-- Each clause is delegated to a dedicated builder, then stitched together.
--
-- BUG FIX: build_where_clause returns (sql, params), but the original only
-- captured the first value, so query.params was always {} and the named
-- placeholders in the WHERE clause were never bound at execution time.
--
-- @param parsed_query table  output of the query parser
-- @param options      table|nil  clause-builder options (e.g. count_only)
-- @return string  the assembled SQL statement
-- @return table   named bind parameters for the statement
function M.build_sql(parsed_query, options)
  options = options or {}

  local query = {
    select = M.build_select_clause(parsed_query, options),
    from = M.build_from_clause(parsed_query, options),
    group_by = M.build_group_by_clause(parsed_query, options),
    order_by = M.build_order_by_clause(parsed_query, options),
    limit = M.build_limit_clause(parsed_query, options),
  }

  -- WHERE is the only clause that also produces bind parameters.
  local where_sql, where_params = M.build_where_clause(parsed_query, options)
  query.where = where_sql
  query.params = where_params or {}

  local sql = M.combine_clauses(query)

  return sql, query.params
end
|
||||
|
||||
-- Build the SELECT clause.
-- Grouped queries project aggregate columns; otherwise each filter field is
-- projected through a correlated subquery against the properties table.
-- options.count_only replaces everything with a distinct-document count.
-- NOTE(review): field names are interpolated directly into the SQL here —
-- assumes they were sanitized upstream by the parser; verify.
function M.build_select_clause(parsed_query, options)
  local fields = { "d.*" }

  if parsed_query.group_by then
    fields[#fields + 1] = "COUNT(p.id) as document_count"
    fields[#fields + 1] = "GROUP_CONCAT(p.value) as aggregated_values"
  else
    for name, _ in pairs(parsed_query.filters) do
      fields[#fields + 1] = string.format(
        "(SELECT p.value FROM properties p WHERE p.document_id = d.id AND p.key = '%s') as %s",
        name, name)
    end
  end

  if options.count_only then
    fields = { "COUNT(DISTINCT d.id) as total_count" }
  end

  return "SELECT " .. table.concat(fields, ", ")
end
|
||||
|
||||
-- Build the FROM clause.
-- Joins the properties table only when filters or conditions reference it.
function M.build_from_clause(parsed_query, options)
  local needs_properties = next(parsed_query.filters) ~= nil or parsed_query.conditions

  if needs_properties then
    return "FROM documents d LEFT JOIN properties p ON d.id = p.document_id"
  end

  return "FROM documents d"
end
|
||||
|
||||
-- Build the WHERE clause from filters and the parsed condition tree.
-- Returns ("", {}) when there is nothing to constrain; otherwise returns the
-- clause plus the merged named-parameter table from all sub-conditions.
function M.build_where_clause(parsed_query, options)
  local conditions = {}
  local params = {}

  -- Append a condition fragment and merge its bind parameters, if any.
  local function absorb(fragment, fragment_params)
    table.insert(conditions, fragment)
    if fragment_params then
      for name, bound in pairs(fragment_params) do
        params[name] = bound
      end
    end
  end

  for field, value in pairs(parsed_query.filters) do
    local fragment, fragment_params = M.build_filter_condition(field, value)
    absorb(fragment, fragment_params)
  end

  if parsed_query.conditions then
    local fragment, fragment_params = M.build_conditions(parsed_query.conditions)
    if fragment then
      absorb(fragment, fragment_params)
    end
  end

  if #conditions == 0 then
    return "", {}
  end

  return "WHERE " .. table.concat(conditions, " AND "), params
end
|
||||
|
||||
-- Build a single WHERE fragment for one property filter.
--
-- BUG FIX: the original emitted named placeholders (:field_filter, and
-- :field_filter_1..n for arrays) but never returned a parameter table, so
-- the placeholders had no bound values at execution time. It now returns
-- (fragment, params) matching the contract build_where_clause expects.
--
-- @param field string  property key (interpolated — assumed pre-sanitized)
-- @param value any     single value, or an array of values for an IN match
-- @return string  SQL fragment
-- @return table   named parameters to bind
function M.build_filter_condition(field, value)
  local param_name = field:gsub("[^%w]", "_") .. "_filter"

  if type(value) == "table" then
    -- Array value: match any listed value via IN (...).
    local placeholders = {}
    local params = {}
    for i = 1, #value do
      local item_param = param_name .. "_" .. i
      placeholders[i] = ":" .. item_param
      params[item_param] = value[i]
    end
    return string.format("(p.key = '%s' AND p.value IN (%s))",
      field, table.concat(placeholders, ", ")), params
  end

  -- Single value: direct equality against one named parameter.
  return string.format("(p.key = '%s' AND p.value = :%s)", field, param_name),
    { [param_name] = value }
end
|
||||
|
||||
-- Dispatch a parsed condition node to its specific builder.
-- Returns (nil, nil) for unrecognized node shapes.
function M.build_conditions(conditions)
  if conditions.type == "comparison" then
    return M.build_comparison_condition(conditions)
  end

  if conditions.type == "existence" then
    return M.build_existence_condition(conditions)
  end

  if conditions.clauses then
    return M.build_logical_condition(conditions)
  end

  return nil, nil
end
|
||||
|
||||
-- Build a comparison condition fragment and its bind parameters.
--
-- BUG FIX: the INCLUDES branch read `params[param_name] "%" .. value ..`
-- (missing `=`), which is a syntax error; it now assigns the LIKE pattern
-- exactly as the CONTAINS branch does.
--
-- Supports text operators (CONTAINS/INCLUDES/STARTS_WITH/ENDS_WITH via
-- LIKE), temporal operators (BEFORE/AFTER/WITHIN), and standard comparison
-- operators; unknown operators fall back to equality.
-- @return string  SQL fragment (wrapped in NOT (...) when condition.negated)
-- @return table   named parameters to bind
function M.build_comparison_condition(condition)
  local field = condition.field
  local operator = condition.operator
  local value = condition.value

  local param_name = field:gsub("[^%w]", "_") .. "_comp"
  local params = {}
  local sql_condition

  -- Substring/prefix/suffix operators all map to LIKE with a wildcard pattern.
  local like_pattern = {
    CONTAINS = function(v) return "%" .. v .. "%" end,
    INCLUDES = function(v) return "%" .. v .. "%" end,
    STARTS_WITH = function(v) return v .. "%" end,
    ENDS_WITH = function(v) return "%" .. v end,
  }

  if like_pattern[operator] then
    sql_condition = string.format("p.key = '%s' AND p.value LIKE :%s", field, param_name)
    params[param_name] = like_pattern[operator](value)
  elseif operator == "BEFORE" then
    sql_condition = string.format("p.key = '%s' AND p.value < :%s", field, param_name)
    params[param_name] = value
  elseif operator == "AFTER" then
    sql_condition = string.format("p.key = '%s' AND p.value > :%s", field, param_name)
    params[param_name] = value
  elseif operator == "WITHIN" then
    -- "within the last N units": values no older than the computed cutoff.
    sql_condition = string.format("p.key = '%s' AND p.value >= :%s", field, param_name)
    if type(value) == "table" and value.type == "relative_time" then
      params[param_name] = M.calculate_relative_time(value)
    else
      params[param_name] = value
    end
  else
    -- Standard comparison operators; anything unknown degrades to equality.
    local op_map = {
      ["="] = "=",
      ["!="] = "!=",
      [">"] = ">",
      ["<"] = "<",
      [">="] = ">=",
      ["<="] = "<=",
    }
    local sql_op = op_map[operator] or "="
    sql_condition = string.format("p.key = '%s' AND p.value %s :%s", field, sql_op, param_name)
    params[param_name] = value
  end

  if condition.negated then
    sql_condition = "NOT (" .. sql_condition .. ")"
  end

  return sql_condition, params
end
|
||||
|
||||
-- Build an EXISTS condition checking whether a document has a given property
-- key at all; condition.negated flips it to NOT EXISTS semantics.
function M.build_existence_condition(condition)
  local sql_condition = string.format(
    "EXISTS (SELECT 1 FROM properties p2 WHERE p2.document_id = d.id AND p2.key = '%s')",
    condition.field)

  if condition.negated then
    return "NOT " .. sql_condition, {}
  end

  return sql_condition, {}
end
|
||||
|
||||
-- Combine child condition fragments with the node's logical operator
-- (conditions.type, e.g. "and"/"or", upper-cased for SQL).
-- Returns (nil, nil) when no child produced a fragment.
function M.build_logical_condition(conditions)
  local fragments = {}
  local merged_params = {}

  for _, clause in ipairs(conditions.clauses) do
    local fragment, clause_params = M.build_conditions(clause)
    if fragment then
      fragments[#fragments + 1] = fragment
      if clause_params then
        for name, bound in pairs(clause_params) do
          merged_params[name] = bound
        end
      end
    end
  end

  if #fragments == 0 then
    return nil, nil
  end

  local joiner = " " .. conditions.type:upper() .. " "
  return "(" .. table.concat(fragments, joiner) .. ")", merged_params
end
|
||||
|
||||
-- Build the GROUP BY clause; empty string when the query has no grouping.
-- "property_key" is an alias for grouping on p.key; any other value is
-- treated as a column of the properties table.
function M.build_group_by_clause(parsed_query, options)
  local group_by = parsed_query.group_by
  if not group_by then
    return ""
  end

  local column
  if group_by == "property_key" then
    column = "p.key"
  else
    column = "p." .. group_by
  end

  return "GROUP BY " .. column
end
|
||||
|
||||
-- Build the ORDER BY clause.
-- Defaults to most-recently-updated first when no ordering was requested.
-- Known document columns map to d.*; anything else is assumed to live on
-- the joined properties table.
function M.build_order_by_clause(parsed_query, options)
  local order = parsed_query.order_by
  if not order then
    return "ORDER BY d.updated_at DESC"
  end

  local column_for = {
    created_at = "d.created_at",
    updated_at = "d.updated_at",
    file_path = "d.file_path",
    title = "CASE WHEN p.key = 'title' THEN p.value END",
  }

  local column = column_for[order.field] or ("p." .. order.field)
  local direction = order.direction or "ASC"

  return string.format("ORDER BY %s %s", column, direction)
end
|
||||
|
||||
-- Build the LIMIT clause; empty string when the query has no limit.
function M.build_limit_clause(parsed_query, options)
  local limit = parsed_query.limit
  if limit then
    return "LIMIT " .. limit
  end
  return ""
end
|
||||
|
||||
-- Join the clause fragments into one newline-separated SQL statement.
-- SELECT and FROM are always present; the optional clauses are included
-- only when their builders produced a non-empty string.
function M.combine_clauses(query)
  local parts = { query.select, query.from }

  local optional = { query.where, query.group_by, query.order_by, query.limit }
  for _, clause in ipairs(optional) do
    if clause ~= "" then
      parts[#parts + 1] = clause
    end
  end

  return table.concat(parts, "\n")
end
|
||||
|
||||
-- Convert a relative-time spec ({ amount, unit }) into an absolute
-- "YYYY-MM-DD" date that far in the past relative to now.
--
-- BUG FIX: the original if/elseif chain tested `unit == "m"` twice, so the
-- month branch was unreachable (always parsed as minutes). Months now use
-- "M"; the previously-dead behavior is unchanged for every other unit.
--
-- Units: s=seconds, m=minutes, h=hours, d=days, w=weeks,
--        M=months (~30 days), y=years (~365 days).
-- Unknown units yield an offset of 0 (today), matching the original.
function M.calculate_relative_time(relative_time)
  local seconds_per_unit = {
    s = 1,
    m = 60,
    h = 3600,
    d = 86400,
    w = 604800,
    M = 2592000,   -- month, approximated as 30 days
    y = 31536000,  -- year, approximated as 365 days
  }

  local unit_seconds = seconds_per_unit[relative_time.unit] or 0
  local offset = relative_time.amount * unit_seconds

  return os.date("%Y-%m-%d", os.time() - offset)
end
|
||||
|
||||
-- Build a COUNT(DISTINCT d.id) variant of the query for pagination totals.
function M.build_count_query(parsed_query)
  return M.build_sql(parsed_query, { count_only = true })
end
|
||||
|
||||
-- Sanity-check generated SQL before execution (defense in depth; the real
-- protection must come from parameter binding and input sanitization).
--
-- BUG FIX: the original keyword check was case-sensitive and substring
-- based, so lowercase "drop table" slipped through while a legitimate
-- identifier containing a keyword could be flagged. Keywords are now
-- matched case-insensitively on word boundaries (%f frontier pattern), so
-- e.g. "d.updated_at" is not a false positive for UPDATE.
--
-- @param sql string|nil
-- @return boolean valid
-- @return string  reason
function M.validate_sql(sql)
  if not sql or sql == "" then
    return false, "Empty SQL query"
  end

  -- Reject statement separators outright (plain find, ';' is not a pattern).
  if sql:find(";", 1, true) then
    return false, "Potentially unsafe SQL detected"
  end

  local upper_sql = sql:upper()
  for _, keyword in ipairs({ "DROP", "DELETE", "UPDATE", "INSERT" }) do
    if upper_sql:match("%f[%w]" .. keyword .. "%f[%W]") then
      return false, "Potentially unsafe SQL detected"
    end
  end

  return true, "SQL query is valid"
end
|
||||
|
||||
return M
|
374
lua/notex/query/executor.lua
Normal file
374
lua/notex/query/executor.lua
Normal file
|
@ -0,0 +1,374 @@
|
|||
-- Query execution engine module
|
||||
local M = {}
|
||||
|
||||
local database = require('notex.database.init')
|
||||
local query_builder = require('notex.query.builder')
|
||||
local query_parser = require('notex.query.parser')
|
||||
local utils = require('notex.utils')
|
||||
|
||||
-- Execute a parsed query end-to-end: validate, build SQL, run it against
-- the database, and post-process rows into a result envelope:
-- { documents, total_count, execution_time_ms, query_hash, success,
--   errors, metadata [, error_type] }.
function M.execute(parsed_query, options)
  options = options or {}
  local started = vim.loop.hrtime()

  local result = {
    documents = {},
    total_count = 0,
    execution_time_ms = 0,
    query_hash = "",
    success = false,
    errors = {},
    metadata = {},
  }

  -- Record a failure and return the envelope early.
  local function fail(error_type, message)
    if message then
      table.insert(result.errors, message)
    end
    result.error_type = error_type
    return result
  end

  if #parsed_query.parse_errors > 0 then
    result.errors = parsed_query.parse_errors
    result.error_type = "parse_error"
    return result
  end

  result.query_hash = query_parser.generate_query_hash(parsed_query)

  local sql, params = query_builder.build_sql(parsed_query, options)
  if not sql then
    return fail("build_error", "Failed to build SQL query")
  end

  local valid, validation_error = query_builder.validate_sql(sql)
  if not valid then
    return fail("validation_error", validation_error)
  end

  local ok, query_result = database.execute(sql, params)
  if not ok then
    return fail("execution_error", "Query execution failed: " .. query_result)
  end

  local processed = M.process_query_results(query_result, parsed_query, options)
  result.documents = processed.documents
  result.metadata = processed.metadata
  result.total_count = M.get_total_count(parsed_query, options)

  result.execution_time_ms = (vim.loop.hrtime() - started) / 1e6
  result.success = true

  -- Surface anything slower than 100ms for later investigation.
  if result.execution_time_ms > 100 then
    utils.log("WARN", string.format("Slow query detected: %.2fms", result.execution_time_ms), {
      query_hash = result.query_hash,
      document_count = #result.documents,
    })
  end

  return result
end
|
||||
|
||||
-- Fold raw result rows into distinct documents with a properties map.
-- Any row column that is not a core document column is treated as a
-- projected property value (empty strings and nils are dropped).
function M.process_query_results(raw_results, parsed_query, options)
  local metadata = {
    properties_found = {},
    aggregation_results = {},
  }

  -- Columns that belong to the document itself, not its properties.
  local core_columns = {
    id = true,
    file_path = true,
    content_hash = true,
    last_modified = true,
    created_at = true,
    updated_at = true,
  }

  local by_id = {}
  for _, row in ipairs(raw_results) do
    local doc = by_id[row.id]
    if not doc then
      doc = {
        id = row.id,
        file_path = row.file_path,
        content_hash = row.content_hash,
        last_modified = row.last_modified,
        created_at = row.created_at,
        updated_at = row.updated_at,
        properties = {},
      }
      by_id[row.id] = doc
    end

    for column, value in pairs(row) do
      if not core_columns[column] and value and value ~= "" then
        doc.properties[column] = value
        metadata.properties_found[column] = true
      end
    end
  end

  -- Flatten the id-keyed map to an array (order unspecified, as before).
  local documents = {}
  for _, doc in pairs(by_id) do
    documents[#documents + 1] = doc
  end

  documents = M.apply_post_filters(documents, parsed_query, options)

  -- Relevance ordering cannot be expressed in SQL; apply it here.
  if parsed_query.order_by and parsed_query.order_by.field == "relevance" then
    documents = M.sort_by_relevance(documents, parsed_query)
  end

  return {
    documents = documents,
    metadata = metadata,
  }
end
|
||||
|
||||
-- Run post-SQL processing passes over the document list: optional text
-- highlighting, then any filters that could not be expressed in SQL.
function M.apply_post_filters(documents, parsed_query, options)
  local result = documents

  if options.highlight and parsed_query.conditions then
    result = M.apply_text_highlighting(result, parsed_query)
  end

  result = M.apply_complex_filters(result, parsed_query)

  return result
end
|
||||
|
||||
-- Placeholder for text highlighting: would mark matching text in document
-- properties. Currently a no-op that returns the input unchanged.
function M.apply_text_highlighting(documents, parsed_query)
  return documents
end
|
||||
|
||||
-- Apply filters too complex for SQL. No rules exist yet, so every document
-- is kept; the loop is the extension point for future predicates.
function M.apply_complex_filters(documents, parsed_query)
  local kept = {}

  for _, doc in ipairs(documents) do
    local include = true
    -- Future complex predicates go here and may clear `include`.
    if include then
      kept[#kept + 1] = doc
    end
  end

  return kept
end
|
||||
|
||||
-- Order documents by a simple relevance score: one point for each filter
-- field the document actually carries as a property. Highest score first.
function M.sort_by_relevance(documents, parsed_query)
  local scored = {}

  for index, doc in ipairs(documents) do
    local score = 0
    for field in pairs(parsed_query.filters) do
      if doc.properties[field] then
        score = score + 1
      end
    end
    scored[index] = { document = doc, score = score }
  end

  -- Strict descending comparator (never true for equal scores).
  table.sort(scored, function(a, b)
    return a.score > b.score
  end)

  local ordered = {}
  for index, entry in ipairs(scored) do
    ordered[index] = entry.document
  end

  return ordered
end
|
||||
|
||||
-- Run the COUNT variant of the query and return the total, or 0 on failure.
-- NOTE(review): build_count_query only declares one parameter; the extra
-- `options` argument is silently ignored by Lua — confirm that is intended.
function M.get_total_count(parsed_query, options)
  local count_sql, count_params = query_builder.build_count_query(parsed_query, options)

  local ok, rows = database.execute(count_sql, count_params)
  if not ok then
    utils.log("ERROR", "Failed to get total count", { error = rows })
    return 0
  end

  local first_row = rows[1]
  return first_row and first_row.total_count or 0
end
|
||||
|
||||
-- Execute a query with (future) result caching. Caching is opt-out via
-- options.cache = false; the cache lookup itself is not implemented yet,
-- so every call currently falls through to M.execute.
function M.execute_cached(parsed_query, options)
  options = options or {}

  if options.cache == false then
    return M.execute(parsed_query, options)
  end

  local query_hash = query_parser.generate_query_hash(parsed_query)
  local cache_key = "query:" .. query_hash

  -- TODO: consult the cache under `cache_key` once a cache layer exists.
  return M.execute(parsed_query, options)
end
|
||||
|
||||
-- Validate a parsed query before execution.
--
-- BUG FIX: when parsed_query was nil the original recorded the error but
-- then indexed parsed_query.filters in the loop below, raising instead of
-- returning the validation failure. Nil now short-circuits.
--
-- @param parsed_query table|nil
-- @return boolean valid
-- @return table   list of error messages (empty when valid)
function M.validate_query(parsed_query)
  local errors = {}

  if not parsed_query then
    return false, { "Query must have filters" }
  end

  if not parsed_query.filters then
    table.insert(errors, "Query must have filters")
  end

  for field, value in pairs(parsed_query.filters or {}) do
    if not M.is_valid_filter_value(value) then
      table.insert(errors, string.format("Invalid filter value for field '%s'", field))
    end
  end

  if parsed_query.conditions then
    M.validate_conditions_recursive(parsed_query.conditions, errors)
  end

  return #errors == 0, errors
end
|
||||
|
||||
-- Reject filter values that are unreasonably large: strings over 1000
-- characters or arrays with more than 100 entries. Everything else passes.
function M.is_valid_filter_value(value)
  local kind = type(value)

  if kind == "string" then
    return #value <= 1000
  end

  if kind == "table" then
    return #value <= 100
  end

  return true
end
|
||||
|
||||
-- Walk a condition tree and append a message to `errors` for every node
-- missing its required fields. Mutates `errors` in place; returns nothing.
function M.validate_conditions_recursive(conditions, errors)
  if conditions.type == "comparison" then
    local complete = conditions.field and conditions.operator and conditions.value ~= nil
    if not complete then
      table.insert(errors, "Invalid comparison condition")
    end
    return
  end

  if conditions.type == "existence" then
    if not conditions.field then
      table.insert(errors, "Invalid existence condition")
    end
    return
  end

  if conditions.clauses then
    for _, clause in ipairs(conditions.clauses) do
      M.validate_conditions_recursive(clause, errors)
    end
  end
end
|
||||
|
||||
-- Collect completion suggestions: frequently-used property keys from the
-- schema, sample values for common properties, and the operator list.
-- Database failures are silently skipped — suggestions are best-effort.
function M.get_suggestions(partial_query, options)
  local suggestions = {
    properties = {},
    values = {},
    operators = {},
  }

  local schema_ok, schema_rows = database.execute(
    "SELECT DISTINCT property_key FROM schema_metadata ORDER BY document_count DESC LIMIT 20")
  if schema_ok then
    for _, row in ipairs(schema_rows) do
      table.insert(suggestions.properties, row.property_key)
    end
  end

  for _, prop in ipairs({ "status", "priority", "tags", "type" }) do
    local values_ok, value_rows = database.execute(
      "SELECT DISTINCT value FROM properties WHERE key = ? AND value_type = 'string' LIMIT 10",
      { prop }
    )
    if values_ok then
      local collected = {}
      for _, row in ipairs(value_rows) do
        collected[#collected + 1] = row.value
      end
      suggestions.values[prop] = collected
    end
  end

  suggestions.operators = {"=", "!=", ">", "<", ">=", "<=", "CONTAINS", "STARTS_WITH", "ENDS_WITH", "INCLUDES"}

  return suggestions
end
|
||||
|
||||
-- Run SQLite's EXPLAIN QUERY PLAN over the generated SQL and return the
-- plan plus a rough cost estimate; on failure returns the SQL and error.
function M.explain_query(parsed_query, options)
  local sql, params = query_builder.build_sql(parsed_query, options)

  local ok, plan = database.execute("EXPLAIN QUERY PLAN " .. sql, params)

  if not ok then
    return {
      success = false,
      error = plan,
      sql = sql,
    }
  end

  return {
    success = true,
    sql = sql,
    params = params,
    plan = plan,
    estimated_cost = M.calculate_query_cost(plan),
  }
end
|
||||
|
||||
-- Heuristic cost from EXPLAIN QUERY PLAN rows: full-table SCANs are
-- expensive (10), indexed SEARCHes cheap (2), everything else nominal (1).
function M.calculate_query_cost(explain_result)
  local total_cost = 0

  for _, row in ipairs(explain_result) do
    local detail = row.detail
    if detail and detail:match("SCAN") then
      total_cost = total_cost + 10
    elseif detail and detail:match("SEARCH") then
      total_cost = total_cost + 2
    else
      total_cost = total_cost + 1
    end
  end

  return total_cost
end
|
||||
|
||||
return M
|
365
lua/notex/query/init.lua
Normal file
365
lua/notex/query/init.lua
Normal file
|
@ -0,0 +1,365 @@
|
|||
-- Query engine coordination module
|
||||
local M = {}
|
||||
|
||||
local query_parser = require('notex.query.parser')
|
||||
local query_executor = require('notex.query.executor')
|
||||
local database = require('notex.database.schema')
|
||||
local utils = require('notex.utils')
|
||||
|
||||
-- Parse, validate, and execute a raw query string, returning the executor's
-- result envelope annotated with the original string and parsed form.
function M.execute_query(query_string, options)
  options = options or {}
  local stop_timer = utils.timer("Query execution")

  local parsed_query = query_parser.parse(query_string)
  if #parsed_query.parse_errors > 0 then
    return {
      success = false,
      error_type = "parse_error",
      errors = parsed_query.parse_errors,
      query_string = query_string,
    }
  end

  local valid, validation_errors = query_executor.validate_query(parsed_query)
  if not valid then
    return {
      success = false,
      error_type = "validation_error",
      errors = validation_errors,
      query_string = query_string,
    }
  end

  local result = query_executor.execute(parsed_query, options)

  -- Close out the timer started above (utils.timer returns a finisher).
  stop_timer()

  result.query_string = query_string
  result.parsed_query = parsed_query

  return result
end
|
||||
|
||||
-- Look up a saved query by name, bump its usage counter, and execute its
-- stored definition. Returns an error envelope when lookup fails.
function M.execute_saved_query(query_name, options)
  options = options or {}

  local ok, saved = database.queries.get_by_name(query_name)
  if not ok then
    return {
      success = false,
      error_type = "database_error",
      error = "Failed to retrieve saved query: " .. saved,
    }
  end

  if not saved then
    return {
      success = false,
      error_type = "not_found",
      error = "Saved query not found: " .. query_name,
    }
  end

  database.queries.update_usage(saved.id)

  local result = M.execute_query(saved.definition, options)
  result.query_name = query_name
  result.saved_query_id = saved.id

  return result
end
|
||||
|
||||
-- Save (create or update) a named query after validating its syntax.
--
-- CONSISTENCY FIX: every other caller in this module treats
-- database.queries.get_by_name as returning (ok, result_or_err) — see
-- execute_saved_query and delete_saved_query. The original here bound the
-- first return value as the existing query itself, so a successful lookup
-- was misread. Now uses the same (ok, result) convention.
--
-- @param query_name   string unique name to store the query under
-- @param query_string string the query definition text
-- @param options      table|nil (currently unused)
-- @return table result envelope with success/action or error details
function M.save_query(query_name, query_string, options)
  options = options or {}

  -- Refuse to persist a query that does not parse.
  local parsed_query = query_parser.parse(query_string)
  if #parsed_query.parse_errors > 0 then
    return {
      success = false,
      error_type = "parse_error",
      errors = parsed_query.parse_errors,
    }
  end

  local lookup_ok, existing_query = database.queries.get_by_name(query_name)
  if not lookup_ok then
    return {
      success = false,
      error_type = "database_error",
      error = "Failed to check existing query: " .. tostring(existing_query),
    }
  end

  local current_time = os.time()
  local query_data = {
    id = existing_query and existing_query.id or utils.generate_id(),
    name = query_name,
    definition = query_string,
    -- Preserve the original creation time on update.
    created_at = existing_query and existing_query.created_at or current_time,
  }

  local ok, err
  if existing_query then
    ok, err = database.queries.update(query_data)
  else
    ok, err = database.queries.create(query_data)
  end

  if not ok then
    return {
      success = false,
      error_type = "database_error",
      error = "Failed to save query: " .. err,
    }
  end

  return {
    success = true,
    query_id = query_data.id,
    query_name = query_name,
    action = existing_query and "updated" or "created",
  }
end
|
||||
|
||||
-- List all saved queries with display metadata (definition preview and a
-- human-readable last-used timestamp).
--
-- ROBUSTNESS FIX: the original compared `query.last_used > 0` directly,
-- which raises if last_used is nil/unset; it now defaults to 0 first.
function M.list_saved_queries(options)
  options = options or {}

  local ok, queries = database.queries.get_all()
  if not ok then
    return {
      success = false,
      error_type = "database_error",
      error = "Failed to retrieve saved queries: " .. queries,
    }
  end

  for _, query in ipairs(queries) do
    -- Truncate long definitions for list display.
    query.definition_preview = query.definition:sub(1, 100) .. (#query.definition > 100 and "..." or "")
    local last_used = query.last_used or 0
    query.last_used_formatted = last_used > 0 and os.date("%Y-%m-%d %H:%M", last_used) or "Never"
  end

  return {
    success = true,
    queries = queries,
    total_count = #queries,
  }
end
|
||||
|
||||
-- Delete a saved query by name. Fails with a structured error when the
-- lookup errors, the query does not exist, or the delete itself fails.
function M.delete_saved_query(query_name)
  local ok, saved = database.queries.get_by_name(query_name)
  if not ok then
    return {
      success = false,
      error_type = "database_error",
      error = "Failed to find query: " .. saved,
    }
  end

  if not saved then
    return {
      success = false,
      error_type = "not_found",
      error = "Query not found: " .. query_name,
    }
  end

  local deleted, delete_err = database.queries.delete(saved.id)
  if not deleted then
    return {
      success = false,
      error_type = "database_error",
      error = "Failed to delete query: " .. delete_err,
    }
  end

  return {
    success = true,
    query_name = query_name,
    deleted_query_id = saved.id,
  }
end
|
||||
|
||||
-- Completion entry point: parse the partial query for context and delegate
-- to the executor's suggestion engine. Cursor defaults to end-of-string.
function M.get_suggestions(partial_query, cursor_pos)
  local position = cursor_pos or #partial_query

  local parsed_query = query_parser.parse(partial_query)

  local suggestions = query_executor.get_suggestions(parsed_query, {
    cursor_pos = position,
  })

  return {
    success = true,
    suggestions = suggestions,
    cursor_pos = position,
  }
end
|
||||
|
||||
-- Syntax-only validation: parse the string and report whether the parser
-- produced any errors, along with the parsed form for further inspection.
function M.validate_query_syntax(query_string)
  local parsed_query = query_parser.parse(query_string)
  local parse_errors = parsed_query.parse_errors

  return {
    valid = #parse_errors == 0,
    errors = parse_errors,
    parsed_query = parsed_query,
  }
end
|
||||
|
||||
-- Parse a query string and return its execution-plan explanation, or a
-- parse-error envelope when the string does not parse.
function M.explain_query(query_string, options)
  options = options or {}

  local parsed_query = query_parser.parse(query_string)
  if #parsed_query.parse_errors > 0 then
    return {
      success = false,
      error_type = "parse_error",
      errors = parsed_query.parse_errors,
    }
  end

  return query_executor.explain_query(parsed_query, options)
end
|
||||
|
||||
-- Pretty-print a query string by parsing it and re-emitting each section
-- (filters, WHERE, ORDER BY, GROUP BY, LIMIT) on its own line.
-- Returns the original string plus the parse errors when parsing fails.
function M.format_query(query_string)
  local parsed_query = query_parser.parse(query_string)
  if #parsed_query.parse_errors > 0 then
    return query_string, parsed_query.parse_errors
  end

  local sections = {}

  -- Filters, one "key: value" per line. NOTE: pairs() order is unspecified,
  -- so filter line order is nondeterministic (same as the original).
  if next(parsed_query.filters) then
    local filter_lines = {}
    for key, value in pairs(parsed_query.filters) do
      local kind = type(value)
      if kind == "string" then
        table.insert(filter_lines, string.format('%s: "%s"', key, value))
      elseif kind == "table" then
        table.insert(filter_lines, string.format('%s: [%s]', key, vim.inspect(value)))
      else
        table.insert(filter_lines, string.format('%s: %s', key, tostring(value)))
      end
    end
    table.insert(sections, table.concat(filter_lines, "\n"))
  end

  if parsed_query.conditions then
    table.insert(sections, "WHERE " .. M.format_conditions(parsed_query.conditions))
  end

  if parsed_query.order_by then
    table.insert(sections, string.format("ORDER BY %s %s", parsed_query.order_by.field, parsed_query.order_by.direction))
  end

  if parsed_query.group_by then
    table.insert(sections, "GROUP BY " .. parsed_query.group_by)
  end

  if parsed_query.limit then
    table.insert(sections, "LIMIT " .. parsed_query.limit)
  end

  return table.concat(sections, "\n"), {}
end
|
||||
|
||||
-- Render a parsed condition tree back to query text (recursive).
-- Comparison nodes become "field op value", existence nodes the bare
-- field name, and clause groups a parenthesized AND/OR join.
function M.format_conditions(conditions)
  if conditions.type == "comparison" then
    return string.format("%s %s %s", conditions.field, conditions.operator, tostring(conditions.value))
  end

  if conditions.type == "existence" then
    return conditions.field
  end

  if conditions.clauses then
    local rendered = {}
    for index, clause in ipairs(conditions.clauses) do
      rendered[index] = M.format_conditions(clause)
    end
    local joiner = " " .. conditions.type:upper() .. " "
    return "(" .. table.concat(rendered, joiner) .. ")"
  end

  -- Unknown node shape: render nothing.
  return ""
end
|
||||
|
||||
-- Collect usage statistics about saved queries.
-- @param options table|nil: currently unused beyond defaulting
-- @return table: { success = true, statistics = <stats table> }
function M.get_query_statistics(options)
  options = options or {}

  local stats = {
    total_queries = 0,
    saved_queries = 0,
    recent_queries = {},
    popular_queries = {},
    average_execution_time = 0
  }

  local ok, saved = database.queries.get_all()
  if ok then
    stats.saved_queries = #saved

    -- Rank only queries that have actually been used at least once.
    local ranked = {}
    for _, entry in ipairs(saved) do
      if entry.use_count > 0 then
        ranked[#ranked + 1] = {
          name = entry.name,
          use_count = entry.use_count,
          last_used = entry.last_used
        }
      end
    end

    -- Most-used first; keep the top 10.
    table.sort(ranked, function(lhs, rhs) return lhs.use_count > rhs.use_count end)
    stats.popular_queries = vim.list_slice(ranked, 1, 10)
  end

  return {
    success = true,
    statistics = stats
  }
end
|
||||
|
||||
-- Initialize the query engine's database backend.
-- @param database_path string: path handed to the database initializer
-- @return boolean, string: success flag and a human-readable message
function M.init(database_path)
  local initialized, init_err = require('notex.database.init').init(database_path)
  if not initialized then
    return false, "Failed to initialize database for query engine: " .. init_err
  end

  utils.log("INFO", "Query engine initialized")
  return true, "Query engine initialized successfully"
end
|
||||
|
||||
return M
|
412
lua/notex/query/parser.lua
Normal file
412
lua/notex/query/parser.lua
Normal file
|
@ -0,0 +1,412 @@
|
|||
-- Query syntax parser module
|
||||
local M = {}
|
||||
|
||||
local utils = require('notex.utils')
|
||||
|
||||
-- Parse query string into structured object.
-- @param query_string string: markdown containing a notex-query block
-- @return table with fields: filters (map), conditions (tree|nil),
--   order_by ({field, direction}|nil), group_by (string|nil),
--   limit (number|nil), raw_query, parse_errors (empty when clean).
function M.parse(query_string)
  local result = {
    filters = {},
    conditions = nil,
    order_by = nil,
    group_by = nil,
    limit = nil,
    raw_query = query_string,
    parse_errors = {}
  }

  if not query_string or query_string == "" then
    table.insert(result.parse_errors, "Empty query string")
    return result
  end

  -- Extract query block from markdown
  local query_content = M.extract_query_block(query_string)
  if not query_content then
    table.insert(result.parse_errors, "No valid query block found")
    return result
  end

  -- Parse query lines
  local lines = M.split_lines(query_content)
  local current_section = "filters" -- filters, where, order_by, group_by, limit

  for _, raw_line in ipairs(lines) do
    -- Lua strings have no :trim(); strip surrounding whitespace by pattern.
    local line = raw_line:match("^%s*(.-)%s*$")

    -- Skip blank and comment lines. Lua has no 'continue' statement
    -- (the original used one, a syntax error), so guard with 'if' instead.
    if line ~= "" and not line:match("^%-%-%-") then
      -- Keyword lines such as "WHERE x > 1" both switch the section AND
      -- carry content, so fall through to the section parser below
      -- (each section parser strips its own keyword prefix). The
      -- original skipped the line after switching, losing its content.
      local section = M.detect_section(line)
      if section then
        current_section = section
      end

      if current_section == "filters" then
        M.parse_filter_line(line, result)
      elseif current_section == "where" then
        M.parse_condition_line(line, result)
      elseif current_section == "order_by" then
        M.parse_order_by_line(line, result)
      elseif current_section == "group_by" then
        M.parse_group_by_line(line, result)
      elseif current_section == "limit" then
        M.parse_limit_line(line, result)
      end
    end
  end

  -- Validate parsed query
  M.validate_parsed_query(result)

  return result
end
|
||||
|
||||
-- Extract the query text from a fenced ```notex-query markdown block.
-- @param content string: markdown document text
-- @return string|nil: block contents, or nil when no block is present
function M.extract_query_block(content)
  -- Closed fence: capture up to the first closing ``` (lazy '-' so a
  -- second block in the same document is not swallowed). NOTE: the
  -- original assigned a single capture to three variables
  -- (start_pos, end_pos, query_content) — string.match returns only the
  -- captures, so query_content was always nil and this branch never fired.
  local fenced = content:match("```notex%-query%s*\n(.-)\n```")
  if fenced then
    return fenced
  end

  -- Unterminated fence: take everything after the opening line.
  local open_ended = content:match("```notex%-query%s*\n(.*)")
  if open_ended then
    return open_ended
  end

  return nil
end
|
||||
|
||||
-- Split content into lines. Blank lines are dropped, since the pattern
-- requires at least one non-newline character per match.
function M.split_lines(content)
  local lines, count = {}, 0
  for line in content:gmatch("[^\r\n]+") do
    count = count + 1
    lines[count] = line
  end
  return lines
end
|
||||
|
||||
-- Map a keyword line to its query section name, or nil when the line
-- does not start a new section. Matching is case-insensitive and each
-- keyword must be followed by at least one space plus content.
function M.detect_section(line)
  local upper_line = line:upper()

  local markers = {
    { "^FROM%s+",       "filters"  },
    { "^WHERE%s+",      "where"    },
    { "^ORDER%s+BY%s+", "order_by" },
    { "^GROUP%s+BY%s+", "group_by" },
    { "^LIMIT%s+",      "limit"    }
  }

  for _, marker in ipairs(markers) do
    if upper_line:match(marker[1]) then
      return marker[2]
    end
  end

  return nil
end
|
||||
|
||||
-- Parse a filter line: either "FROM <filters>" or bare property filters.
-- The FROM keyword, when present, is stripped before parsing.
function M.parse_filter_line(line, result)
  local remainder = line:match("^FROM%s+(.+)")
  M.parse_property_filters(remainder or line, result)
end
|
||||
|
||||
-- Merge YAML-style "key: value" filters parsed from a string into
-- result.filters (later keys overwrite earlier ones).
function M.parse_property_filters(filter_string, result)
  local parsed = M.parse_yaml_filters(filter_string)
  for name, filter_value in pairs(parsed) do
    result.filters[name] = filter_value
  end
end
|
||||
|
||||
-- Parse YAML-style "key: value" filters from a string.
-- Supports double-quoted strings ("v"), bracketed arrays ([a, b]) and
-- bare scalars (trimmed).
-- @return table: map of key -> string | array of strings
function M.parse_yaml_filters(yaml_string)
  local filters = {}

  for key, value in yaml_string:gmatch("(%w+)%s*:%s*(.+)") do
    local quoted_value = value:match('^"(.*)"$')
    if quoted_value then
      filters[key] = quoted_value
    else
      local array_match = value:match("^%[(.*)%]$")
      if array_match then
        local array_values = {}
        -- Strip whitespace, then split on commas. The gsub results are
        -- parenthesized because gsub returns (string, count) and the
        -- original passed the extra count to table.insert, which treats
        -- a third argument as an insert position and errors.
        local compact = (array_match:gsub("%s", ""))
        for item in compact:gmatch("[^,]+") do
          table.insert(array_values, (item:gsub("^['\"](.*)['\"]$", "%1")))
        end
        filters[key] = array_values
      else
        -- Lua strings have no :trim() (the original called it); strip
        -- surrounding whitespace with a pattern instead.
        filters[key] = value:match("^%s*(.-)%s*$")
      end
    end
  end

  return filters
end
|
||||
|
||||
-- Parse a "WHERE <conditions>" line into result.conditions; records a
-- parse error when the WHERE keyword or its payload is missing.
function M.parse_condition_line(line, result)
  local condition_string = line:match("^WHERE%s+(.+)")
  if condition_string then
    result.conditions = M.parse_conditions(condition_string)
  else
    table.insert(result.parse_errors, "Invalid WHERE clause: " .. line)
  end
end
|
||||
|
||||
-- Parse conditions with logical operators.
-- Builds a clause tree: { type = "AND"|"OR", clauses = { <condition>... } }.
-- If any AND-part contains an OR, the whole expression is treated as an
-- OR of that part's pieces and parsing returns early.
-- NOTE(review): part:match("OR") matches "OR" as a substring anywhere,
-- so a field like "COLOR" would trigger the OR path — confirm whether a
-- word-boundary match is intended.
-- NOTE(review): on the early OR return, the remaining AND-parts are
-- discarded; mixed "a AND b OR c" expressions lose clauses — verify.
function M.parse_conditions(condition_string)
  local conditions = {
    type = "AND",
    clauses = {}
  }

  -- Split by AND/OR operators
  local and_parts = M.split_logical_operators(condition_string, "AND")
  local or_parts = {}  -- unused; kept as-is (documentation-only change)

  -- Check if this is an OR condition
  for _, part in ipairs(and_parts) do
    if part:match("OR") then
      local or_split = M.split_logical_operators(part, "OR")
      if #or_split > 1 then
        conditions.type = "OR"
        for _, or_part in ipairs(or_split) do
          table.insert(conditions.clauses, M.parse_single_condition(or_part))
        end
        -- Early return: only this OR group is kept (see NOTE above).
        return conditions
      end
    end
  end

  -- Parse AND conditions
  for _, and_part in ipairs(and_parts) do
    table.insert(conditions.clauses, M.parse_single_condition(and_part))
  end

  return conditions
end
|
||||
|
||||
-- Split a condition string on a logical operator word ("AND", "OR").
-- The operator must be surrounded by whitespace; the pieces are returned
-- trimmed, in order: "a = 1 AND b = 2" -> { "a = 1", "b = 2" }.
-- NOTE: the original interpolated the operator pattern into a character
-- class ("[^%s+AND%s+]+"), which splits on the individual characters
-- %, s, +, A, N, D rather than on the operator word; it also shadowed
-- the 'string' library with its parameter name and called the
-- non-existent string:trim().
function M.split_logical_operators(input, operator)
  local parts = {}
  local separator = "%s+" .. operator .. "%s+"
  local cursor = 1

  while true do
    local match_start, match_end = input:find(separator, cursor)
    local piece
    if match_start then
      piece = input:sub(cursor, match_start - 1)
    else
      piece = input:sub(cursor)
    end
    -- Trim surrounding whitespace (Lua has no string:trim()).
    table.insert(parts, (piece:match("^%s*(.-)%s*$")))
    if not match_start then
      break
    end
    cursor = match_end + 1
  end

  return parts
end
|
||||
|
||||
-- Parse a single condition such as "priority >= 3" or "title CONTAINS x".
-- @return table: a comparison node { type="comparison", field, operator,
--   value, negated } or, when no operator is found, an existence node
--   { type="existence", field, negated }.
function M.parse_single_condition(condition_string)
  -- Lua strings have no :trim(); strip surrounding whitespace by pattern.
  condition_string = condition_string:match("^%s*(.-)%s*$")

  -- Handle NOT prefix. The original used condition_string:sub(4):trim(),
  -- but :trim() does not exist and sub(4) leaves the separator space;
  -- strip the prefix with a pattern instead (also tolerates multiple
  -- spaces after NOT).
  local negated = false
  local negated_rest = condition_string:match("^NOT%s+(.+)$")
  if negated_rest then
    negated = true
    condition_string = negated_rest
  end

  -- Comparison operators, most specific first so ">=" is not mistaken
  -- for ">" during matching.
  local operators = {
    { pattern = ">=%s*", type = ">=" },
    { pattern = "<=%s*", type = "<=" },
    { pattern = "!=%s*", type = "!=" },
    { pattern = ">%s*", type = ">" },
    { pattern = "<%s*", type = "<" },
    { pattern = "=%s*", type = "=" },
    { pattern = "%s+CONTAINS%s+", type = "CONTAINS" },
    { pattern = "%s+STARTS_WITH%s+", type = "STARTS_WITH" },
    { pattern = "%s+ENDS_WITH%s+", type = "ENDS_WITH" },
    { pattern = "%s+INCLUDES%s+", type = "INCLUDES" },
    { pattern = "%s+BEFORE%s+", type = "BEFORE" },
    { pattern = "%s+AFTER%s+", type = "AFTER" },
    { pattern = "%s+WITHIN%s+", type = "WITHIN" }
  }

  for _, op in ipairs(operators) do
    local field, value = condition_string:match("^(.-)" .. op.pattern .. "(.+)$")
    if field and value then
      return {
        type = "comparison",
        field = field:match("^%s*(.-)%s*$"),
        operator = op.type,
        value = M.parse_value(value:match("^%s*(.-)%s*$")),
        negated = negated
      }
    end
  end

  -- No operator found: treat as a bare existence check on the field.
  return {
    type = "existence",
    field = condition_string,
    negated = negated
  }
end
|
||||
|
||||
-- Parse a literal value: quoted strings, numbers, booleans, relative
-- time spans ("7d"), falling back to the raw string.
-- @return string|number|boolean|table
function M.parse_value(value_string)
  -- Double-quoted string
  local quoted = value_string:match('^"(.*)"$')
  if quoted then
    return quoted
  end

  -- Single-quoted string
  quoted = value_string:match("^'(.*)'$")
  if quoted then
    return quoted
  end

  -- Number
  local number = tonumber(value_string)
  if number then
    return number
  end

  -- Boolean (case-insensitive)
  local lower = value_string:lower()
  if lower == "true" then
    return true
  elseif lower == "false" then
    return false
  end

  -- Relative time value such as "7d" (h/d/w/m/y units). NOTE: the
  -- original assigned both captures to one variable and then indexed it
  -- (time_match[1]) — string.match returns the captures as multiple
  -- values, so amount/unit were always nil.
  local amount, unit = value_string:match("^(%d+)([hdwmy])$")
  if amount then
    return { type = "relative_time", amount = tonumber(amount), unit = unit }
  end

  -- Default to string
  return value_string
end
|
||||
|
||||
-- Parse an "ORDER BY <field> [ASC|DESC]" line into result.order_by.
-- The direction defaults to "ASC" when omitted.
function M.parse_order_by_line(line, result)
  local order_string = line:match("^ORDER%s+BY%s+(.+)")
  if not order_string then
    table.insert(result.parse_errors, "Invalid ORDER BY clause: " .. line)
    return
  end

  -- Lua patterns support neither alternation nor an optional group, so
  -- the original "(%w+)%s*(ASC|DESC)?$" is a malformed pattern that
  -- raises at runtime. Capture an optional trailing word and validate
  -- it explicitly instead.
  local field, direction = order_string:match("^(%w+)%s*(%a*)%s*$")
  if not field then
    table.insert(result.parse_errors, "Invalid ORDER BY format: " .. order_string)
    return
  end

  direction = direction:upper()
  if direction == "" then
    direction = "ASC"
  elseif direction ~= "ASC" and direction ~= "DESC" then
    table.insert(result.parse_errors, "Invalid ORDER BY format: " .. order_string)
    return
  end

  result.order_by = {
    field = field,
    direction = direction
  }
end
|
||||
|
||||
-- Parse a "GROUP BY <field>" line into result.group_by.
function M.parse_group_by_line(line, result)
  local group_string = line:match("^GROUP%s+BY%s+(.+)")
  if not group_string then
    table.insert(result.parse_errors, "Invalid GROUP BY clause: " .. line)
    return
  end

  -- Lua strings have no :trim() (the original called it); strip
  -- surrounding whitespace with a pattern instead.
  result.group_by = group_string:match("^%s*(.-)%s*$")
end
|
||||
|
||||
-- Parse a "LIMIT <n>" line into result.limit; n must parse as a
-- positive number, otherwise a parse error is recorded.
function M.parse_limit_line(line, result)
  local limit_string = line:match("^LIMIT%s+(.+)")
  if not limit_string then
    table.insert(result.parse_errors, "Invalid LIMIT clause: " .. line)
    return
  end

  local limit = tonumber(limit_string)
  if limit and limit > 0 then
    result.limit = limit
  else
    table.insert(result.parse_errors, "Invalid LIMIT value: " .. limit_string)
  end
end
|
||||
|
||||
-- Validate a parsed query in place, appending messages to
-- query.parse_errors. Checks that the query selects something and that
-- filter/condition field names are well-formed.
function M.validate_parsed_query(query)
  -- A query must have at least one filter or condition to be runnable.
  if next(query.filters) == nil and not query.conditions then
    table.insert(query.parse_errors, "Query must have at least one filter or condition")
  end

  -- Basic field-name validation. (The original declared an unused
  -- 'valid_fields' table here; removed.)
  for field, _ in pairs(query.filters) do
    if not M.is_valid_field_name(field) then
      table.insert(query.parse_errors, "Invalid field name: " .. field)
    end
  end

  -- Validate condition-tree field names recursively.
  if query.conditions then
    M.validate_conditions(query.conditions, query.parse_errors)
  end
end
|
||||
|
||||
-- A valid field name contains only word characters, '_', '-' and '.'.
function M.is_valid_field_name(field)
  if field:match("^[%w_%-%.]+$") then
    return true
  end
  return false
end
|
||||
|
||||
-- Recursively validate field names in a condition tree, appending
-- messages to the supplied errors list.
function M.validate_conditions(conditions, errors)
  local node_type = conditions.type
  if node_type == "comparison" then
    if not M.is_valid_field_name(conditions.field) then
      table.insert(errors, "Invalid field name in condition: " .. conditions.field)
    end
  elseif node_type == "existence" then
    if not M.is_valid_field_name(conditions.field) then
      table.insert(errors, "Invalid field name in existence check: " .. conditions.field)
    end
  elseif conditions.clauses then
    for _, clause in ipairs(conditions.clauses) do
      M.validate_conditions(clause, errors)
    end
  end
end
|
||||
|
||||
-- Generate query hash for caching.
-- Serializes the query's semantic fields (filters, conditions, order_by,
-- group_by, limit) to JSON and hashes the result via utils.sha256.
-- NOTE(review): vim.json.encode iterates table keys in unspecified
-- order, so semantically equal queries may not always produce identical
-- hashes — confirm whether a canonical (sorted-key) encoding is needed
-- for reliable cache hits.
function M.generate_query_hash(query)
  local hash_input = vim.json.encode({
    filters = query.filters,
    conditions = query.conditions,
    order_by = query.order_by,
    group_by = query.group_by,
    limit = query.limit
  })

  return utils.sha256(hash_input)
end
|
||||
|
||||
return M
|
Loading…
Add table
Add a link
Reference in a new issue