Initial vibecoded proof of concept
parent 74812459af
commit 461318a656
61 changed files with 13306 additions and 0 deletions
0  tests/integration/main_spec.lua  Normal file
129  tests/integration/test_document_indexing.lua  Normal file
@@ -0,0 +1,129 @@
-- Integration tests for document indexing workflow
local busted = require('busted')

describe("Document Indexing Workflow Integration", function()
  local indexer

  before_each(function()
    -- These modules don't exist yet - tests should fail
    indexer = require('notex.index')
  end)

  it("should index markdown files with YAML headers", function()
    local test_files = {
      "/tmp/test_doc1.md",
      "/tmp/test_doc2.md"
    }

    -- Create test markdown files
    local file1 = io.open(test_files[1], "w")
    file1:write([[
---
title: "Test Document 1"
status: "draft"
priority: "high"
tags: ["test", "urgent"]
---
# Test Document 1

This is a test document with YAML header.
]])
    file1:close()

    local file2 = io.open(test_files[2], "w")
    file2:write([[
---
title: "Test Document 2"
status: "review"
priority: "medium"
tags: ["test", "review"]
---
# Test Document 2

Another test document.
]])
    file2:close()

    -- Index the documents
    local result = indexer.index_documents("/tmp")

    assert.is_true(result.success)
    assert.are.equal(2, result.indexed_count)

    -- Clean up
    os.remove(test_files[1])
    os.remove(test_files[2])
  end)

  it("should handle documents with malformed YAML headers", function()
    local malformed_file = "/tmp/malformed.md"

    local file = io.open(malformed_file, "w")
    file:write([[
---
title: "Malformed Document"
status: "draft"
invalid_yaml: [unclosed array
---
# Malformed Document

This has bad YAML.
]])
    file:close()

    local result = indexer.index_documents("/tmp")

    assert.is_true(result.success)
    assert.are.equal(0, result.indexed_count)
    assert.is_not_nil(result.errors)
    assert.are.equal(1, #result.errors)

    -- Clean up
    os.remove(malformed_file)
  end)

  it("should incrementally update index when files change", function()
    local test_file = "/tmp/incremental_test.md"

    -- Create initial document
    local file = io.open(test_file, "w")
    file:write([[
---
title: "Incremental Test"
status: "draft"
---
# Incremental Test

Initial content.
]])
    file:close()

    -- Initial indexing
    local result1 = indexer.index_documents("/tmp")
    assert.is_true(result1.success)
    assert.are.equal(1, result1.indexed_count)

    -- Modify the file
    vim.wait(100) -- Ensure different timestamp
    local file2 = io.open(test_file, "w")
    file2:write([[
---
title: "Incremental Test"
status: "review"
priority: "high"
---
# Incremental Test

Modified content.
]])
    file2:close()

    -- Incremental update
    local result2 = indexer.update_index("/tmp")
    assert.is_true(result2.success)
    assert.are.equal(1, result2.updated_count)

    -- Clean up
    os.remove(test_file)
  end)
end)
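These tests are written test-first: notex.index does not exist anywhere in this commit, so they are expected to fail. For reference, a minimal sketch of the interface the assertions assume, with the module path and result fields inferred from the require call and the checks above; the skeleton itself is hypothetical and not part of the commit:

-- notex/index.lua (hypothetical skeleton)
local M = {}

-- Index every markdown file under `directory`, parsing YAML front matter.
-- The tests expect: success (boolean), indexed_count (number), errors (list).
function M.index_documents(directory)
  local result = { success = true, indexed_count = 0, errors = {} }
  -- TODO: walk `directory`, parse front matter, store document properties.
  return result
end

-- Re-index only files whose modification time changed since the last run.
-- The tests expect: success (boolean), updated_count (number).
function M.update_index(directory)
  -- TODO: compare stored timestamps against the filesystem and re-parse changed files.
  return { success = true, updated_count = 0 }
end

return M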
143  tests/integration/test_query_workflow.lua  Normal file
@@ -0,0 +1,143 @@
-- Integration tests for query workflow
local busted = require('busted')

describe("Query Workflow Integration", function()
  local query_engine

  before_each(function()
    -- These modules don't exist yet - tests should fail
    query_engine = require('notex.query')
  end)

  it("should execute end-to-end query workflow", function()
    -- Setup test data
    local test_documents = {
      {
        id = "doc1",
        file_path = "/tmp/doc1.md",
        properties = {
          title = "Project Plan",
          status = "draft",
          priority = "high",
          created_at = "2024-03-15T10:30:00Z"
        }
      },
      {
        id = "doc2",
        file_path = "/tmp/doc2.md",
        properties = {
          title = "Meeting Notes",
          status = "review",
          priority = "medium",
          created_at = "2024-03-14T15:20:00Z"
        }
      }
    }

    -- Initialize query engine with test data
    query_engine.initialize(test_documents)

    -- Execute query
    local query_string = [[
```notex-query
status: "draft"
priority: "high"
ORDER BY created_at DESC
```
]]
    local result = query_engine.execute_query(query_string)

    -- Validate results
    assert.is_not_nil(result.documents)
    assert.are.equal(1, #result.documents)
    assert.are.equal("Project Plan", result.documents[1].properties.title)
    assert.are.equal("draft", result.documents[1].properties.status)
    assert.is_true(result.execution_time_ms < 100) -- Performance requirement
  end)

  it("should handle complex queries with conditions", function()
    local test_documents = {
      {
        id = "doc1",
        properties = {
          title = "Important Task",
          status = "active",
          priority = 5,
          created_at = "2024-01-15T10:00:00Z",
          tags = {"urgent", "project"}
        }
      },
      {
        id = "doc2",
        properties = {
          title = "Regular Task",
          status = "active",
          priority = 2,
          created_at = "2024-02-01T14:30:00Z",
          tags = {"routine"}
        }
      }
    }

    query_engine.initialize(test_documents)

    local complex_query = [[
```notex-query
FROM status: "active"
WHERE priority > 3 AND tags INCLUDES "urgent"
ORDER BY created_at DESC
```
]]
    local result = query_engine.execute_query(complex_query)

    assert.are.equal(1, #result.documents)
    assert.are.equal("Important Task", result.documents[1].properties.title)
  end)

  it("should handle queries that return no results", function()
    local test_documents = {
      {
        id = "doc1",
        properties = {
          title = "Document 1",
          status = "archived"
        }
      }
    }

    query_engine.initialize(test_documents)

    local query = [[
```notex-query
status: "active"
```
]]
    local result = query_engine.execute_query(query)

    assert.are.equal(0, #result.documents)
    assert.are.equal(0, result.total_count)
  end)

  it("should save and reuse queries", function()
    local query_name = "My Active Tasks"
    local query_definition = [[
```notex-query
status: "active"
priority: "high"
```
]]

    -- Save query
    local save_result = query_engine.save_query(query_name, query_definition)
    assert.is_true(save_result.success)

    -- List saved queries
    local queries = query_engine.list_saved_queries()
    assert.is_not_nil(queries[query_name])

    -- Execute saved query
    local result = query_engine.execute_saved_query(query_name)
    assert.is_not_nil(result)
    assert.is_number(result.execution_time_ms)
  end)
end)
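As with the indexing tests, notex.query is not implemented in this commit. A minimal sketch of the surface these tests assume, with function names and result fields inferred from the calls above; everything else about the skeleton is hypothetical:

-- notex/query.lua (hypothetical skeleton)
local M = {}

local documents = {}     -- documents loaded via initialize()
local saved_queries = {} -- name -> query definition string

-- Load the document set the queries will run against.
function M.initialize(docs)
  documents = docs or {}
end

-- Parse a ```notex-query``` block and filter the loaded documents.
-- The tests expect: documents (list), total_count (number), execution_time_ms (number).
function M.execute_query(query_string)
  local start = os.clock()
  local matches = {}
  -- TODO: parse the block (property filters, FROM/WHERE/ORDER BY) and filter `documents`.
  return {
    documents = matches,
    total_count = #matches,
    execution_time_ms = (os.clock() - start) * 1000,
  }
end

-- Persist a named query so it can be re-run later.
function M.save_query(name, definition)
  saved_queries[name] = definition
  return { success = true }
end

function M.list_saved_queries()
  return saved_queries
end

function M.execute_saved_query(name)
  local definition = saved_queries[name]
  return definition and M.execute_query(definition) or nil
end

return M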
174  tests/integration/test_virtual_buffer.lua  Normal file
@@ -0,0 +1,174 @@
-- Integration tests for virtual buffer workflow
local busted = require('busted')

describe("Virtual Buffer Workflow Integration", function()
  local ui_manager

  before_each(function()
    -- These modules don't exist yet - tests should fail
    ui_manager = require('notex.ui')
  end)

  it("should create virtual buffer for query results", function()
    local query_results = {
      documents = {
        {
          id = "doc1",
          file_path = "/tmp/document.md",
          properties = {
            title = "Test Document",
            status = "draft",
            priority = "high",
            created_at = "2024-03-15T10:30:00Z"
          }
        }
      },
      total_count = 1,
      execution_time_ms = 25
    }

    local buffer_result = ui_manager.show_query_results(query_results)

    assert.is_not_nil(buffer_result.buffer_id)
    assert.is_not_nil(buffer_result.window_id)
    assert.is_table(buffer_result.lines)
    assert.is_table(buffer_result.mappings)

    -- Verify buffer content
    local lines = buffer_result.lines
    assert.is_true(#lines > 0)

    -- Check for header line
    local found_header = false
    for _, line in ipairs(lines) do
      if line:find("Results for:") then
        found_header = true
        break
      end
    end
    assert.is_true(found_header, "Buffer should contain query results header")
  end)

  it("should handle keyboard interactions in virtual buffer", function()
    local query_results = {
      documents = {
        {
          id = "doc1",
          file_path = "/tmp/document.md",
          properties = {
            title = "Test Document",
            status = "draft"
          }
        }
      },
      total_count = 1
    }

    local buffer_result = ui_manager.show_query_results(query_results)

    -- Test opening document with Enter key
    local open_result = ui_manager.handle_keypress(buffer_result.buffer_id, "<CR>", 3)
    assert.is_true(open_result.success)
    assert.are.equal("/tmp/document.md", open_result.file_path)

    -- Test editing with 'e' key
    local edit_result = ui_manager.handle_keypress(buffer_result.buffer_id, "e", 3)
    assert.is_true(edit_result.success)
    assert.are.equal("doc1", edit_result.document_id)

    -- Test closing with 'q' key
    local close_result = ui_manager.handle_keypress(buffer_result.buffer_id, "q", 1)
    assert.is_true(close_result.success)
  end)

  it("should update document properties through virtual buffer", function()
    -- Create initial buffer
    local query_results = {
      documents = {
        {
          id = "doc1",
          file_path = "/tmp/document.md",
          properties = {
            title = "Test Document",
            status = "draft",
            priority = "high"
          }
        }
      },
      total_count = 1
    }

    local buffer_result = ui_manager.show_query_results(query_results)

    -- Simulate user editing status field
    local update_result = ui_manager.update_property_in_buffer(
      buffer_result.buffer_id,
      3, -- line number
      2, -- column number
      "review" -- new value
    )

    assert.is_true(update_result.success)
    assert.are.equal("doc1", update_result.document_id)
    assert.are.equal("status", update_result.property)
    assert.are.equal("review", update_result.new_value)
    assert.are.equal("draft", update_result.old_value)

    -- Verify the underlying file was updated
    local updated_content = ui_manager.get_document_content("doc1")
    assert.is_not_nil(updated_content:find('status: "review"'))
  end)

  it("should handle large query result sets efficiently", function()
    -- Generate large test dataset
    local large_results = { documents = {}, total_count = 1000, execution_time_ms = 45 }

    for i = 1, 1000 do
      table.insert(large_results.documents, {
        id = "doc" .. i,
        file_path = "/tmp/doc" .. i .. ".md",
        properties = {
          title = "Document " .. i,
          status = i % 2 == 0 and "active" or "draft",
          priority = math.random(1, 5)
        }
      })
    end

    local start_time = vim.loop.hrtime()
    local buffer_result = ui_manager.show_query_results(large_results)
    local end_time = vim.loop.hrtime()

    local buffer_creation_time = (end_time - start_time) / 1e6 -- Convert to milliseconds

    assert.is_not_nil(buffer_result.buffer_id)
    assert.is_true(buffer_creation_time < 100, "Buffer creation should be under 100ms")
    assert.is_true(#buffer_result.lines <= 100, "Should limit lines for performance")
  end)

  it("should gracefully handle buffer cleanup", function()
    local query_results = {
      documents = {
        {
          id = "doc1",
          properties = { title = "Test Document" }
        }
      },
      total_count = 1
    }

    local buffer_result = ui_manager.show_query_results(query_results)

    -- Verify buffer exists
    local buffer_exists = vim.api.nvim_buf_is_valid(buffer_result.buffer_id)
    assert.is_true(buffer_exists)

    -- Close buffer
    local close_result = ui_manager.close_query_view(buffer_result.buffer_id)
    assert.is_true(close_result.success)

    -- Verify buffer no longer exists
    buffer_exists = vim.api.nvim_buf_is_valid(buffer_result.buffer_id)
    assert.is_false(buffer_exists)
  end)
end)
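notex.ui is likewise absent from this commit. A rough sketch of the assumed buffer-handling surface, limited to the two calls whose behaviour the tests pin down most tightly (show_query_results and close_query_view); the Neovim API functions used here are real, but the rendering layout, window geometry, and any field beyond those asserted above are guesses:

-- notex/ui.lua (hypothetical skeleton)
local M = {}

-- Render query results into a scratch floating window.
-- The tests expect: buffer_id, window_id, lines (table), mappings (table).
function M.show_query_results(results)
  local lines = { ("Results for: %d document(s)"):format(results.total_count or 0) }

  -- Cap rendered lines so huge result sets stay responsive (the tests assert at most 100 lines).
  for _, doc in ipairs(results.documents) do
    if #lines >= 100 then break end
    local title = doc.properties.title or doc.id or "untitled"
    table.insert(lines, ("%s  [%s]"):format(title, doc.properties.status or ""))
  end

  local buf = vim.api.nvim_create_buf(false, true) -- unlisted scratch buffer
  vim.api.nvim_buf_set_lines(buf, 0, -1, false, lines)
  local win = vim.api.nvim_open_win(buf, true, {
    relative = "editor", row = 2, col = 2,
    width = 80, height = math.min(#lines, 20), style = "minimal",
  })

  return {
    buffer_id = buf,
    window_id = win,
    lines = lines,
    mappings = { ["<CR>"] = "open", e = "edit", q = "close" },
  }
end

-- Wipe the scratch buffer (and its window) when the view is dismissed.
function M.close_query_view(buffer_id)
  if vim.api.nvim_buf_is_valid(buffer_id) then
    vim.api.nvim_buf_delete(buffer_id, { force = true })
  end
  return { success = true }
end

return M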