diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..b09b958 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,40 @@ +name: Test + +# on: + # push: + # branches: + # - "main" + +on: + workflow_dispatch: + inputs: + branch: + description: 'Branch to run the workflow on' + required: true + default: 'feat-struct-refactor' # pull_request: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + nvim-versions: ["stable", "nightly"] + + name: test + steps: + - name: checkout + uses: actions/checkout@v3 + + - name: Prepare dependencies + run: | + git clone --depth 1 https://github.com/nvim-lua/plenary.nvim ~/.local/share/nvim/site/pack/vendor/start/plenary.nvim + git clone --depth 1 https://github.com/nvim-neotest/nvim-nio ~/.local/share/nvim/site/pack/vendor/start/nvim-nio + nvim --headless -c 'TSInstallSync lua | quit' + + - uses: rhysd/action-setup-vim@v1 + with: + neovim: true + version: ${{ matrix.nvim-versions }} + + - name: run tests + run: make test diff --git a/Makefile b/Makefile index ccda11c..58fb301 100644 --- a/Makefile +++ b/Makefile @@ -1,16 +1,18 @@ SRC_DIR=lua -TESTS_DIR=tests +TESTS_DIR=./tests +PREPARE_CONFIG=${TESTS_DIR}/prepare-config.lua TEST_CONFIG=${TESTS_DIR}/minimal_init.lua -.PHONY: test lint format all +.PHONY: test lint format -checks: format lint +checks: format lint test -test: - nvim --headless -c "PlenaryBustedDirectory {minimal_init = '${TEST_CONFIG}'}" lint: luacheck ${SRC_DIR} format: - ~/.cargo/bin//stylua ${SRC_DIR} --config-path=.stylua.toml - + ~/.cargo/bin/stylua ${SRC_DIR} ${TESTS_DIR} --config-path=.stylua.toml +test: + @nvim -v + # @nvim --headless --noplugin -u ${TEST_CONFIG} \ + # -c "PlenaryBustedDirectory ${TESTS_DIR} {minimal_init = '${TEST_CONFIG}'}" diff --git a/lua/neotest-jdtls/junit/common.lua b/lua/neotest-jdtls/junit/common.lua new file mode 100644 index 0000000..67b9a52 --- /dev/null +++ b/lua/neotest-jdtls/junit/common.lua @@ -0,0 +1,21 @@ +local 
M = {} + +function M.get_short_error_message(result) + if result.actual and result.expected then + return string.format( + 'Expected: [%s] but was [%s]', + result.expected[1], + result.actual[1] + ) + end + local trace_result = '' + for idx, trace in ipairs(result.trace) do + trace_result = trace_result .. trace + if idx > 3 then + break + end + end + return trace_result +end + +return M diff --git a/lua/neotest-jdtls/junit/dynamic_test_result.lua b/lua/neotest-jdtls/junit/dynamic_test_result.lua new file mode 100644 index 0000000..b531574 --- /dev/null +++ b/lua/neotest-jdtls/junit/dynamic_test_result.lua @@ -0,0 +1,87 @@ +local class = require('neotest-jdtls.utils.class') +local get_short_error_message = + require('neotest-jdtls.junit.common').get_short_error_message +local lib = require('neotest.lib') +local async = require('neotest.async') +local TestStatus = require('neotest-jdtls.types.enums').TestStatus + +---@class DynamicTestResult +---@field invocations string[] +---@field nodes java_test.TestResults[] +local DynamicTestResult = class() +function DynamicTestResult:_init() + self.is_dynamic_test = true + self.invocations = {} + self.nodes = {} + self.status = nil + self.errors = {} + self.output = {} + self.all = {} +end + +function DynamicTestResult:append_invocation(invocation, node) + assert(invocation) + assert(node) + + self.all[invocation] = node + table.insert(self.invocations, invocation) + table.insert(self.nodes, node) +end + +function DynamicTestResult:get_neotest_result() + local all = 0 + for invocation, node in pairs(self.all) do + all = all + 1 + self:append(invocation, node) + end + local results_path = async.fn.tempname() + + table.insert( + self.output, + 1, + string.format( + 'Total invocations: %s\nSuccess: %s\nFailed: %s\n', + all, + all - #self.errors, + #self.errors + ) + ) + lib.files.write(results_path, table.concat(self.output, '\n')) + return { + status = self.status, + output = results_path, + errors = self.errors, + } +end + 
+function DynamicTestResult:append(invocation, node) + table.insert( + self.output, + string.format( + '\n----------------%s----------------', + node.result.status or TestStatus.Passed + ) + ) + table.insert( + self.output, + string.format('Invocation %s: %s', invocation, node.display_name) + ) + table.insert(self.output, '----------------Output----------------') + + if node.result.status == TestStatus.Failed then + local short_message = get_short_error_message(node.result) + self.status = TestStatus.Failed + table.insert(self.errors, { message = short_message }) + table.insert(self.output, table.concat(node.result.trace, '\n')) + else + if self.status == nil then + self.status = TestStatus.Passed + end + table.insert( + self.output, + 'The console output is available in the DAP console.\n' + ) + end +end + +return DynamicTestResult diff --git a/lua/neotest-jdtls/junit/reports/junit.lua b/lua/neotest-jdtls/junit/reports/junit.lua deleted file mode 100644 index 30691a6..0000000 --- a/lua/neotest-jdtls/junit/reports/junit.lua +++ /dev/null @@ -1,51 +0,0 @@ -local class = require('neotest-jdtls.utils.class') -local log = require('neotest-jdtls.utils.log') -local ResultParserFactory = - require('neotest-jdtls.junit.results.result-parser-factory') - ----@class java_test.JUnitTestReport ----@field private conn uv_tcp_t ----@field private result_parser java_test.TestParser ----@field private result_parser_fac java_test.TestParserFactory -local JUnitReport = class() - -function JUnitReport:_init() - self.conn = nil - self.result_parser_fac = ResultParserFactory() -end - ----Returns the test results ----@return java_test.TestResults[] -function JUnitReport:get_results() - return self.result_parser:get_test_details() -end - ----Returns a stream reader function ----@param conn uv_tcp_t ----@return fun(err: string, buffer: string) # callback function -function JUnitReport:get_stream_reader(conn) - self.conn = conn - self.result_parser = self.result_parser_fac:get_parser() - 
return vim.schedule_wrap(function(err, buffer) - if err then - self.conn:close() - return - end - - if buffer then - self:on_update(buffer) - else - self.conn:close() - end - end) -end - ----Runs on connection update ----@private ----@param text string -function JUnitReport:on_update(text) - log.trace('on_update', text) - self.result_parser:parse(text) -end - -return JUnitReport diff --git a/lua/neotest-jdtls/junit/result_parser.lua b/lua/neotest-jdtls/junit/result_parser.lua new file mode 100644 index 0000000..192355b --- /dev/null +++ b/lua/neotest-jdtls/junit/result_parser.lua @@ -0,0 +1,504 @@ +local class = require('neotest-jdtls.utils.class') +local log = require('neotest-jdtls.utils.log') +local async = require('neotest.async') +local lib = require('neotest.lib') +local TestStatus = require('neotest-jdtls.types.enums').TestStatus +local TestKind = require('neotest-jdtls.types.enums').TestKind +local DynamicTestResult = require('neotest-jdtls.junit.dynamic_test_result') +local get_short_error_message = + require('neotest-jdtls.junit.common').get_short_error_message +local BaseParser = require('neotest-jdtls.utils.base_parser') + +local default_passed_test_output = + 'The console output is available in the DAP console.' 
+---@type string|nil +local default_passed_test_output_path = nil + +---@enum java_test.TestExecStatus +local TestExecStatus = { + Started = 'started', + Ended = 'ended', +} + +---@enum MessageId +local MessageId = { + TestTree = '%TSTTREE', + TestStart = '%TESTS', + TestEnd = '%TESTE', + TestFailed = '%FAILED', + TestError = '%ERROR', + ExpectStart = '%EXPECTS', + ExpectEnd = '%EXPECTE', + ActualStart = '%ACTUALS', + ActualEnd = '%ACTUALE', + TraceStart = '%TRACES', + TraceEnd = '%TRACEE', + IGNORE_TEST_PREFIX = '@Ignore: ', + ASSUMPTION_FAILED_TEST_PREFIX = '@AssumptionFailure: ', +} + +local JUnitTestPart = { + CLASS = 'class:', + NESTED_CLASS = 'nested-class:', + METHOD = 'method:', + TEST_FACTORY = 'test-factory:', + -- Property id is for jqwik + PROPERTY = 'property:', + TEST_TEMPLATE = 'test-template:', + TEST_TEMPLATE_INVOCATION = 'test-template-invocation:', + DYNAMIC_CONTAINER = 'dynamic-container:', + DYNAMIC_TEST = 'dynamic-test:', +} + +local array_lookup = { + ['%5BB'] = 'byte[]', + ['%5BS'] = 'short[]', + ['%5BI'] = 'int[]', + ['%5BJ'] = 'long[]', + ['%5BF'] = 'float[]', + ['%5BD'] = 'double[]', + ['%5BC'] = 'char[]', + ['%5BZ'] = 'boolean[]', +} + +---@class java_test.JunitTestParser : BaseParser +---@field private test_details java_test.TestResults[] +local JunitTestParser = class(BaseParser) + +---@param context TestContext +function JunitTestParser:_init(context) + -- self:super() + self.context = context + self.test_details = {} + self.lookup = {} + self.results = {} +end + +---@private +JunitTestParser.node_parsers = { + [MessageId.TestTree] = 'parse_test_tree', + [MessageId.TestStart] = 'parse_test_start', + [MessageId.TestEnd] = 'parse_test_end', + [MessageId.TestFailed] = 'parse_test_failed', + [MessageId.TestError] = 'parse_test_failed', +} + +---@private +JunitTestParser.strtobool = { + ['true'] = true, + ['false'] = false, +} + +---@private +function JunitTestParser._get_default_passed_test_output_path() + if not 
default_passed_test_output_path then + default_passed_test_output_path = async.fn.tempname() + lib.files.write(default_passed_test_output_path, default_passed_test_output) + end + return default_passed_test_output_path +end + +---@private +function JunitTestParser._split(str) + local result = {} + local current_match = {} + local escape = false + for i = 1, #str do + local c = str:sub(i, i) + if escape then + table.insert(current_match, c) + escape = false + elseif c == '\\' then + table.insert(current_match, c) + escape = true + elseif c == ',' then + if #current_match > 0 then + table.insert(result, table.concat(current_match)) + end + current_match = {} + else + table.insert(current_match, c) + end + end + if #current_match > 0 then + table.insert(result, table.concat(current_match)) + end + return result +end + +function JunitTestParser:_map_to_neotest_result_item(item) + -- if item.result == nil then + -- log.error('item', vim.inspect(item)) + -- -- return { + -- -- status = TestStatus.Skipped, + -- -- } + -- end + if item.result.status == TestStatus.Failed then + local results_path = async.fn.tempname() + lib.files.write(results_path, table.concat(item.result.trace, '\n')) + local short_message = get_short_error_message(item.result) + return { + status = TestStatus.Failed, + errors = { + { message = short_message }, + }, + output = results_path, + short = short_message, + } + elseif item.result.status == TestStatus.Skipped then + return { + status = TestStatus.Skipped, + } + else + local results_path + local log_data + if item.result.trace then + log_data = table.concat(item.result.trace, '\n') + results_path = async.fn.tempname() + else + log_data = default_passed_test_output + results_path = self._get_default_passed_test_output_path() + end + lib.files.write(results_path, log_data) + return { + status = TestStatus.Passed, + output = results_path, + } + end +end + +---@private +function JunitTestParser:parse_test_tree(data) + local node = { + test_id = 
tonumber(data[1]), + test_name = data[2], + is_suite = JunitTestParser.strtobool[data[3]], + test_count = tonumber(data[4]), + is_dynamic_test = JunitTestParser.strtobool[data[5]], + parent_id = tonumber(data[6]), + display_name = data[7], + parameter_types = data[8], + unique_id = data[#data], + } + assert(data.unique_id ~= '') + local parent = self:find_result_node(node.parent_id) + if not parent then + table.insert(self.test_details, node) + else + parent.children = parent.children or {} + table.insert(parent.children, node) + end +end + +---@private +function JunitTestParser.get_non_junit_test_method(project_name, message) + local method_name = message:match('(.+)%(') + local class_name = message:match('%((.+)%)') + if method_name ~= nil and class_name ~= nil then + return project_name .. '@' .. class_name .. '#' .. method_name + end + return project_name .. '@' .. message +end + +---@private +function JunitTestParser:parse_test_start(data) + local test_id = tonumber(data[1]) + local node = self:find_result_node(test_id) + assert(node) + node.result = {} + node.result.execution = TestExecStatus.Started +end + +function JunitTestParser.get_junit5_method_name(rawMethodName) + local raw_param_strings = rawMethodName:match('%((.+)%)') + if raw_param_strings == nil then + return rawMethodName + end + -- replease '$' with '.' for nested classes + raw_param_strings = raw_param_strings:gsub('%$', '.') + -- replease '\,' with ',' for escaped commas + raw_param_strings = raw_param_strings:gsub('\\,', ',') + local params = vim.split(raw_param_strings, ',') + local result = {} + for _, param in ipairs(params) do + param = param:gsub('%s+', '') + local p = param:match('([^%.]+)$') + if vim.startswith(param, '%5B') then + if vim.startswith(param, '%5BL') then + --- object + p = param:match('([^%.]+);') .. '[]' + else + p = array_lookup[param] + end + end + table.insert(result, p) + end + + local method_name = rawMethodName:match('(.+)%(') + return method_name .. '(' .. 
table.concat(result, ',') .. ')' +end + +--@param projectName string +--@param message string +function JunitTestParser.get_test_id_for_junit5_method(projectName, message) + -- [engine:junit5]/[class:com.example.MyTest]/[method:myTest]/[test-template:myTest(String\, int)] + local parts = vim.split(message, '/') + + local className = '' + local methodName = '' + local invocationSuffix = '' + + if #parts == 0 or parts[1] == '' then + -- error('Junit4 test method name is not supported') + return JunitTestParser.get_non_junit_test_method(projectName, message) + end + + for _, part in ipairs(parts) do + -- Remove the leading and trailing brackets. + part = part:match('%[(.-)%]') + + if vim.startswith(part, JUnitTestPart.CLASS) then + className = part:sub(#JUnitTestPart.CLASS + 1) + elseif vim.startswith(part, JUnitTestPart.METHOD) then + local rawMethodName = part:sub(#JUnitTestPart.METHOD + 1) + -- If the method name exists then we want to include the '#' qualifier. + methodName = '#' .. JunitTestParser.get_junit5_method_name(rawMethodName) + elseif vim.startswith(part, JUnitTestPart.TEST_FACTORY) then + local rawMethodName = part:sub(#JUnitTestPart.TEST_FACTORY + 1) + -- If the method name exists then we want to include the '#' qualifier. + methodName = '#' .. JunitTestParser.get_junit5_method_name(rawMethodName) + elseif vim.startswith(part, JUnitTestPart.NESTED_CLASS) then + local nestedClassName = part:sub(#JUnitTestPart.NESTED_CLASS + 1) + className = className .. '$' .. nestedClassName + elseif vim.startswith(part, JUnitTestPart.TEST_TEMPLATE) then + local rawMethodName = + part:sub(#JUnitTestPart.TEST_TEMPLATE + 1):gsub('\\,', ',') + -- If the method name exists then we want to include the '#' qualifier. + methodName = '#' .. 
JunitTestParser.get_junit5_method_name(rawMethodName) + elseif vim.startswith(part, JUnitTestPart.PROPERTY) then + local rawMethodName = + part:sub(#JUnitTestPart.PROPERTY + 1):gsub('\\,', ',') + -- If the method name exists then we want to include the '#' qualifier. + methodName = '#' .. JunitTestParser.get_junit5_method_name(rawMethodName) + elseif vim.startswith(part, JUnitTestPart.TEST_TEMPLATE_INVOCATION) then + invocationSuffix = invocationSuffix + .. '[' + .. part:sub(#JUnitTestPart.TEST_TEMPLATE_INVOCATION + 1) + .. ']' + elseif vim.startswith(part, JUnitTestPart.DYNAMIC_CONTAINER) then + invocationSuffix = invocationSuffix + .. '[' + .. part:sub(#JUnitTestPart.DYNAMIC_CONTAINER + 1) + .. ']' + elseif vim.startswith(part, JUnitTestPart.DYNAMIC_TEST) then + invocationSuffix = invocationSuffix + .. '[' + .. part:sub(#JUnitTestPart.DYNAMIC_TEST + 1) + .. ']' + end + end + -- log.error('methodName', methodName) + if className ~= '' then + return projectName .. '@' .. className .. methodName, invocationSuffix + else + return projectName .. '@' .. 
message, invocationSuffix + end +end + +---@private +function JunitTestParser:parse_test_end_junit5(node) + local success, id, invocation = pcall( + self.get_test_id_for_junit5_method, + self.context.project_name, + node.unique_id + ) + if not success then + log.error( + 'error during getTestIdForJunit5Method: %s, node: %', + id, + vim.inspect(node) + ) + else + local test_item = self.context.lookup[id] + if invocation and invocation ~= '' then + if not self.results[test_item.key] then + assert(node.is_dynamic_test) + self.results[test_item.key] = DynamicTestResult() + end + self.results[test_item.key]:append_invocation(invocation, node) + else + self.results[test_item.key] = node + end + end +end + +---@private +function JunitTestParser:parse_test_end_junit(node) + local success, id = pcall( + self.get_non_junit_test_method, + self.context.project_name, + node.unique_id + ) + + if not success then + log.error( + 'error during getTestIdForJunitMethod: %s, node: %', + id, + vim.inspect(node) + ) + else + local test_item = self.context.lookup[id] + self.results[test_item.key] = node + end +end + +---@private +function JunitTestParser:parse_test_end(data) + local test_id = tonumber(data[1]) + local node = self:find_result_node(test_id) + assert(node) + node.result.execution = TestExecStatus.Ended + if self.context.test_kind == TestKind.JUnit5 then + self:parse_test_end_junit5(node) + else + self:parse_test_end_junit(node) + end +end + +---@private +function JunitTestParser:parse_test_failed(data, line_iter) + local test_id = tonumber(data[1]) + local node = self:find_result_node(test_id) + assert(node) + + node.result.status = TestStatus.Failed + + while true do + local line = line_iter() + + if line == nil then + break + end + + -- EXPECTED + if vim.startswith(line, MessageId.ExpectStart) then + node.result.expected = JunitTestParser.get_content_until_end_tag( + MessageId.ExpectEnd, + line_iter + ) + + -- ACTUAL + elseif vim.startswith(line, MessageId.ActualStart) 
then + node.result.actual = JunitTestParser.get_content_until_end_tag( + MessageId.ActualEnd, + line_iter + ) + + -- TRACE + elseif vim.startswith(line, MessageId.TraceStart) then + node.result.trace = + JunitTestParser.get_content_until_end_tag(MessageId.TraceEnd, line_iter) + end + end +end + +---@private +function JunitTestParser.get_content_until_end_tag(end_tag, line_iter) + local content = {} + + while true do + local line = line_iter() + + if line == nil or vim.startswith(line, end_tag) then + break + end + + table.insert(content, line) + end + + return content +end + +---@private +function JunitTestParser:find_result_node(id) + local function find_node(nodes) + if not nodes or #nodes == 0 then + return + end + + for _, node in ipairs(nodes) do + if node.test_id == id then + return node + end + + local _node = find_node(node.children) + + if _node then + return _node + end + end + end + + return find_node(self.test_details) +end + +---@param text string test result buffer +function JunitTestParser:on_update(text) + if text:sub(-1) ~= '\n' then + text = text .. 
'\n' + end + + local line_iter = text:gmatch('(.-)\n') + local line = line_iter() + while line ~= nil do + local message_id = line:sub(1, 8):gsub('%s+', '') + local content = line:sub(9) + + local node_parser = JunitTestParser.node_parsers[message_id] + + if node_parser then + local data = self._split(content) + if self[JunitTestParser.node_parsers[message_id]] then + self[JunitTestParser.node_parsers[message_id]](self, data, line_iter) + end + end + + line = line_iter() + end +end + +function JunitTestParser:get_mapped_result() + local result = {} + for k, v in pairs(self.results) do + local data + if v.is_dynamic_test then + data = v:get_neotest_result() + else + data = self:_map_to_neotest_result_item(v) + end + result[k] = data + end + return result +end + +---@class java_test.TestResultExecutionDetails +---@field actual string[] lines +---@field expected string[] lines +---@field status java_test.TestExecStatus +---@field execution java_test.TestExecutionStatus +---@field trace string[] lines + +---@class java_test.TestResults +---@field display_name string +---@field is_dynamic_test boolean +---@field is_suite boolean +---@field parameter_types string +---@field parent_id integer +---@field test_count integer +---@field test_id integer +---@field test_name string +---@field unique_id string +---@field result java_test.TestResultExecutionDetails +---@field children java_test.TestResults[] + +return JunitTestParser diff --git a/lua/neotest-jdtls/junit/results/execution-status.lua b/lua/neotest-jdtls/junit/results/execution-status.lua deleted file mode 100644 index 846c65e..0000000 --- a/lua/neotest-jdtls/junit/results/execution-status.lua +++ /dev/null @@ -1,7 +0,0 @@ ----@enum java_test.TestExecutionStatus -local TestStatus = { - Started = 'started', - Ended = 'ended', -} - -return TestStatus diff --git a/lua/neotest-jdtls/junit/results/message-id.lua b/lua/neotest-jdtls/junit/results/message-id.lua deleted file mode 100644 index 28a37a5..0000000 --- 
a/lua/neotest-jdtls/junit/results/message-id.lua +++ /dev/null @@ -1,30 +0,0 @@ ----@enum MessageId -local MessageId = { - -- Notification about a test inside the test suite. - -- TEST_TREE + testId + "," + testName + "," + isSuite + "," + testCount + "," + isDynamicTest + - -- "," + parentId + "," + displayName + "," + parameterTypes + "," + uniqueId - - -- isSuite = "true" or "false" - -- isDynamicTest = "true" or "false" - -- parentId = the unique id of its parent if it is a dynamic test, otherwise can be "-1" - -- displayName = the display name of the test - -- parameterTypes = comma-separated list of method parameter types if applicable, otherwise an - -- empty string - -- uniqueId = the unique ID of the test provided by JUnit launcher, otherwise an empty string - - TestTree = '%TSTTREE', - TestStart = '%TESTS', - TestEnd = '%TESTE', - TestFailed = '%FAILED', - TestError = '%ERROR', - ExpectStart = '%EXPECTS', - ExpectEnd = '%EXPECTE', - ActualStart = '%ACTUALS', - ActualEnd = '%ACTUALE', - TraceStart = '%TRACES', - TraceEnd = '%TRACEE', - IGNORE_TEST_PREFIX = '@Ignore: ', - ASSUMPTION_FAILED_TEST_PREFIX = '@AssumptionFailure: ', -} - -return MessageId diff --git a/lua/neotest-jdtls/junit/results/result-parser-factory.lua b/lua/neotest-jdtls/junit/results/result-parser-factory.lua deleted file mode 100644 index c21113e..0000000 --- a/lua/neotest-jdtls/junit/results/result-parser-factory.lua +++ /dev/null @@ -1,14 +0,0 @@ -local class = require('neotest-jdtls.utils.class') -local TestParser = require('neotest-jdtls.junit.results.result-parser') - ----@class java_test.TestParserFactory -local TestParserFactory = class() - ----Returns a test parser of given type ----@param args any ----@return java_test.TestParser -function TestParserFactory.get_parser(_args) - return TestParser() -end - -return TestParserFactory diff --git a/lua/neotest-jdtls/junit/results/result-parser.lua b/lua/neotest-jdtls/junit/results/result-parser.lua deleted file mode 100644 index 
a72b336..0000000 --- a/lua/neotest-jdtls/junit/results/result-parser.lua +++ /dev/null @@ -1,200 +0,0 @@ -local class = require('neotest-jdtls.utils.class') - -local MessageId = require('neotest-jdtls.junit.results.message-id') -local TestStatus = require('neotest-jdtls.junit.results.result-status') -local TestExecStatus = require('neotest-jdtls.junit.results.execution-status') - ----@class java_test.TestParser ----@field private test_details java_test.TestResults[] -local TestParser = class() - ----Init ----@private -function TestParser:_init() - self.test_details = {} -end - ----@private -TestParser.node_parsers = { - [MessageId.TestTree] = 'parse_test_tree', - [MessageId.TestStart] = 'parse_test_start', - [MessageId.TestEnd] = 'parse_test_end', - [MessageId.TestFailed] = 'parse_test_failed', - [MessageId.TestError] = 'parse_test_failed', -} - ----@private -TestParser.strtobool = { - ['true'] = true, - ['false'] = false, -} - ----Parse a given text into test details ----@param text string test result buffer -function TestParser:parse(text) - if text:sub(-1) ~= '\n' then - text = text .. 
'\n' - end - local line_iter = text:gmatch('(.-)\n') - - local line = line_iter() - - while line ~= nil do - local message_id = line:sub(1, 8):gsub('%s+', '') - local content = line:sub(9) - - local node_parser = TestParser.node_parsers[message_id] - - if node_parser then - local data = vim.split(content, ',', { plain = true, trimempty = true }) - - if self[TestParser.node_parsers[message_id]] then - self[TestParser.node_parsers[message_id]](self, data, line_iter) - end - end - - line = line_iter() - end -end - ----Returns the parsed test details ----@return java_test.TestResults # parsed test details -function TestParser:get_test_details() - return self.test_details -end - ----@private -function TestParser:parse_test_tree(data) - local node = { - test_id = tonumber(data[1]), - test_name = data[2], - is_suite = TestParser.strtobool[data[3]], - test_count = tonumber(data[4]), - is_dynamic_test = TestParser.strtobool[data[5]], - parent_id = tonumber(data[6]), - display_name = data[7], - parameter_types = data[8], - unique_id = data[9], - } - - local parent = self:find_result_node(node.parent_id) - - if not parent then - table.insert(self.test_details, node) - else - parent.children = parent.children or {} - table.insert(parent.children, node) - end -end - ----@private -function TestParser:parse_test_start(data) - local test_id = tonumber(data[1]) - local node = self:find_result_node(test_id) - assert(node) - node.result = {} - node.result.execution = TestExecStatus.Started -end - ----@private -function TestParser:parse_test_end(data) - local test_id = tonumber(data[1]) - local node = self:find_result_node(test_id) - assert(node) - node.result.execution = TestExecStatus.Ended -end - ----@private -function TestParser:parse_test_failed(data, line_iter) - local test_id = tonumber(data[1]) - local node = self:find_result_node(test_id) - assert(node) - - node.result.status = TestStatus.Failed - - while true do - local line = line_iter() - - if line == nil then - break - 
end - - -- EXPECTED - if vim.startswith(line, MessageId.ExpectStart) then - node.result.expected = - TestParser.get_content_until_end_tag(MessageId.ExpectEnd, line_iter) - - -- ACTUAL - elseif vim.startswith(line, MessageId.ActualStart) then - node.result.actual = - TestParser.get_content_until_end_tag(MessageId.ActualEnd, line_iter) - - -- TRACE - elseif vim.startswith(line, MessageId.TraceStart) then - node.result.trace = - TestParser.get_content_until_end_tag(MessageId.TraceEnd, line_iter) - end - end -end - ----@private -function TestParser.get_content_until_end_tag(end_tag, line_iter) - local content = {} - - while true do - local line = line_iter() - - if line == nil or vim.startswith(line, end_tag) then - break - end - - table.insert(content, line) - end - - return content -end - ----@private -function TestParser:find_result_node(id) - local function find_node(nodes) - if not nodes or #nodes == 0 then - return - end - - for _, node in ipairs(nodes) do - if node.test_id == id then - return node - end - - local _node = find_node(node.children) - - if _node then - return _node - end - end - end - - return find_node(self.test_details) -end - -return TestParser - ----@class java_test.TestResultExecutionDetails ----@field actual string[] lines ----@field expected string[] lines ----@field status java_test.TestStatus ----@field execution java_test.TestExecutionStatus ----@field trace string[] lines - ----@class java_test.TestResults ----@field display_name string ----@field is_dynamic_test boolean ----@field is_suite boolean ----@field parameter_types string ----@field parent_id integer ----@field test_count integer ----@field test_id integer ----@field test_name string ----@field unique_id string ----@field result java_test.TestResultExecutionDetails ----@field children java_test.TestResults[] diff --git a/lua/neotest-jdtls/junit/results/result-status.lua b/lua/neotest-jdtls/junit/results/result-status.lua deleted file mode 100644 index 3017aa6..0000000 --- 
a/lua/neotest-jdtls/junit/results/result-status.lua +++ /dev/null @@ -1,7 +0,0 @@ ----@enum java_test.TestStatus -local TestStatus = { - Failed = 'failed', - Skipped = 'skipped', -} - -return TestStatus diff --git a/lua/neotest-jdtls/junit/runner.lua b/lua/neotest-jdtls/junit/runner.lua new file mode 100644 index 0000000..b0d37ad --- /dev/null +++ b/lua/neotest-jdtls/junit/runner.lua @@ -0,0 +1,40 @@ +local class = require('neotest-jdtls.utils.class') +local log = require('neotest-jdtls.utils.log') +local TestRunner = require('neotest-jdtls.utils.base_runner') +local JunitTestParser = require('neotest-jdtls.junit.result_parser') + +---@class JunitRunner : BaseRunner +local JunitRunner = class(TestRunner) + +function JunitRunner:_init() + self:super() +end + +--- @param launch_arguments JunitLaunchRequestArguments +--- @param is_debug boolean +--- @param executable string +--- @return table +function JunitRunner:get_dap_launcher_config( + launch_arguments, + is_debug, + executable +) + local dap_launcher_config + dap_launcher_config = + self.get_base_dap_launcher_config(launch_arguments, executable, { + debug = is_debug, + label = 'Launch All Java Tests', + }) + dap_launcher_config.args = dap_launcher_config.args:gsub( + '-port ([0-9]+)', + '-port ' .. 
self.server:getsockname().port + ) + log.debug('dap_launcher_config', vim.inspect(dap_launcher_config)) + return dap_launcher_config +end + +function JunitRunner:get_result_parser() + assert(self.context.test_kind, 'test_kind is nil') + return JunitTestParser(self.context) +end +return JunitRunner diff --git a/lua/neotest-jdtls/neotest/impl/excute.lua b/lua/neotest-jdtls/neotest/impl/excute.lua index 54eece8..866d146 100644 --- a/lua/neotest-jdtls/neotest/impl/excute.lua +++ b/lua/neotest-jdtls/neotest/impl/excute.lua @@ -1,259 +1,22 @@ -local JUnitReport = require('neotest-jdtls.junit.reports.junit') -local log = require('neotest-jdtls.utils.log') -local TestLevel = require('neotest-jdtls.utils.jdtls').TestLevel -local nio = require('nio') -local jdtls = require('neotest-jdtls.utils.jdtls') +local JunitRunner = require('neotest-jdtls.junit.runner') +local project = require('neotest-jdtls.utils.project') +local TestKind = require('neotest-jdtls.types.enums').TestKind local M = {} -local function get_dap_launcher_config(launch_args, java_exec, config) - return { - name = config.label, - type = 'java', - request = 'launch', - mainClass = launch_args.mainClass, - projectName = launch_args.projectName, - noDebug = not config.debug, - javaExec = java_exec, - cwd = launch_args.workingDirectory, - classPaths = launch_args.classpath, - modulePaths = launch_args.modulepath, - vmArgs = table.concat(launch_args.vmArguments, ' '), - args = table.concat(launch_args.programArguments, ' '), - -- env: config?.env, - -- envFile: config?.envFile, - -- sourcePaths: config?.sourcePaths, - -- preLaunchTask: config?.preLaunchTask, - -- postDebugTask: config?.postDebugTask, - } -end - -local function setup(server, dap_launcher_config, report) - server:bind('127.0.0.1', 0) - server:listen(128, function(err) - assert(not err, err) - local sock = assert(vim.loop.new_tcp(), 'uv.new_tcp must return handle') - server:accept(sock) - local success = 
sock:read_start(report:get_stream_reader(sock)) - assert(success == 0, 'failed to listen to reader') - end) - dap_launcher_config.args = dap_launcher_config.args:gsub( - '-port ([0-9]+)', - '-port ' .. server:getsockname().port - ) - return dap_launcher_config -end - ----@return JavaTestItem ----@param test_file_uri string -local function get_java_test_item(test_file_uri) - ---@type JavaTestItem - local java_test_items = jdtls.find_test_types_and_methods(test_file_uri) - return java_test_items -end - ---- @param test_file_uri string ---- @return JunitLaunchRequestArguments -local function handle_test(data, test_file_uri) - local java_test_items = get_java_test_item(test_file_uri) - assert(#java_test_items ~= 0, 'No test items found') - local java_test_item = java_test_items[1] - ---@type JavaTestItem - local closest_item = nil - local end_line = data.range[3] - for _, children in ipairs(java_test_item.children) do - if children.range['end'].line == end_line then - closest_item = children - break - end - closest_item = children - end - - return { - projectName = closest_item.projectName, - testLevel = TestLevel.Method, - testKind = closest_item.testKind, - testNames = { closest_item.jdtHandler }, - } -end - ---- @param test_file_uri string ---- @return JunitLaunchRequestArguments|nil -local function handle_dir(tree, test_file_uri) - local file_nodes = {} - for _, node in tree:iter_nodes() do - local node_data = node:data() - if - node_data.type == 'file' - and vim.startswith(vim.uri_from_fname(node_data.path), test_file_uri) - then - file_nodes[node_data.id] = vim.uri_from_fname(node_data.path) - end - end - local items = {} - local project_name = nil - local test_kind = nil - for _, url in pairs(file_nodes) do - local java_test_items = get_java_test_item(url) - if java_test_items and #java_test_items == 1 then - local java_test_item = java_test_items[1] - table.insert(items, java_test_item.fullName) - if project_name == nil then - project_name = 
java_test_item.projectName - end - if test_kind == nil then - test_kind = java_test_item.testKind - end - else - log.warn('Unexpected number of test items found for ', url) - end - end - - if #items == 0 then - log.warn('No project name found') - return nil - end - - return { - projectName = project_name, - testLevel = TestLevel.Class, - testKind = test_kind, - testNames = items, - } -end - ---- @param test_file_uri string ---- @return JunitLaunchRequestArguments|nil -local function handle_file(test_file_uri) - local java_test_items = get_java_test_item(test_file_uri) - if not java_test_items or #java_test_items == 0 then - log.info('No test items found') - return nil - end - local java_test_item = java_test_items[1] - return { - projectName = java_test_item.projectName, - testLevel = TestLevel.Class, - testKind = java_test_item.testKind, - testNames = { vim.split(java_test_item.fullName, '#')[1] }, - } -end - -local function shutdown_server(server) - if server then - server:shutdown() - server:close() - log.debug('server closed') - end -end - -local function run_test(dap_launcher_config, server) - local event = nio.control.event() - nio.run(function() - require('dap').run(dap_launcher_config, { - after = function(_) - shutdown_server(server) - event.set() - end, - }) - end) - event.wait() -end - ----@param test_file_uri string ----@return JunitLaunchRequestArguments|nil -local function resolve_junit_launch_arguments(tree, test_file_uri) - local data = tree:data() - ---@type JunitLaunchRequestArguments|nil - local arguments - if data.type == 'test' then - arguments = handle_test(data, test_file_uri) - elseif data.type == 'dir' then - arguments = handle_dir(tree, test_file_uri) - elseif data.type == 'file' or data.type == 'namespace' then - arguments = handle_file(test_file_uri) - else - error('Unsupported type: ' .. 
local JunitRunner = require('neotest-jdtls.junit.runner')
local project = require('neotest-jdtls.utils.project')
local TestKind = require('neotest-jdtls.types.enums').TestKind

local M = {}

---Builds the neotest run specification by delegating to a framework-specific
---runner selected from the current project's detected test kind.
---@param args neotest.RunArgs
---@return neotest.RunSpec
function M.build_spec(args)
  -- TODO(multimodule): derive the project from args.tree:root():data()
  local current_project = project.get_current_project()
  -- A message on the assert: a bare `assert(cond)` only reports
  -- "assertion failed!", which is useless to users hitting this path.
  assert(
    current_project.test_kind ~= TestKind.None,
    'No supported test framework detected for the current project'
  )
  local runner
  if current_project.test_kind == TestKind.TestNG then
    error('TestNG is not supported yet')
  else
    -- JUnit 4 and JUnit 5 are both handled by the JUnit runner.
    runner = JunitRunner()
  end
  return runner:run(args)
end

return M
trace - if idx > 3 then - break - end - end - return trace_result -end - -local function map_to_neotest_result_item(item) - if item.result.status == TestStatus.Failed then - local results_path = async.fn.tempname() - lib.files.write(results_path, table.concat(item.result.trace, '\n')) - local short_message = get_short_error_message(item.result) - return { - status = TestStatus.Failed, - errors = { - { message = short_message }, - }, - output = results_path, - short = short_message, - } - elseif item.result.status == TestStatus.Skipped then - return { - status = TestStatus.Skipped, - } - else - local results_path - local log_data - if item.result.trace then - log_data = table.concat(item.result.trace, '\n') - results_path = async.fn.tempname() - else - log_data = default_passed_test_output - results_path = get_default_passed_test_output_path() - end - lib.files.write(results_path, log_data) - return { - status = TestStatus.Passed, - output = results_path, - } - end -end - -local function get_test_key_from_junit_result(test_name) - -- test_name format: "function_name(package.name.ClassName)" - log.debug('get_test_key_from_junit_result input:', test_name) - local function_name = test_name:match('^(.+)%(') -- Extract "function_name" - local class_name = test_name:match('%.([%w$]+)%)$') -- Extract "ClassName" - - assert(function_name, 'function name not found') - assert(class_name, 'class name not found') - local key = class_name .. '::' .. 
function_name - log.debug('get_test_key_from_junit_result output:', key) - return key -end - -local function get_test_key_from_neotest_id(test_id) - -- test_id format: "/path/to/file::class_name::function_name" - log.debug('get_test_key_from_neotest_id input:', test_id) - local key = test_id:match('::(.+)$') - log.debug('get_test_key_from_neotest_id output:', key) - return key -end - -local function group_and_map_test_results(test_result_lookup, suite) - for _, ch in ipairs(suite.children) do - if not ch.is_suite then - local key = get_test_key_from_junit_result(ch.test_name) - if test_result_lookup[key] == nil then - test_result_lookup[key] = {} - end - table.insert(test_result_lookup[key], map_to_neotest_result_item(ch)) - else - group_and_map_test_results(test_result_lookup, ch) - end - end -end - -local function merge_neotest_results(test_result_lookup, node_data) - log.debug('Before|Merging test results', vim.inspect(node_data)) - local key = get_test_key_from_neotest_id(node_data.id) - if test_result_lookup[key] == nil then - local root = jdtls.root_dir() - nio.scheduler() - local current = project.get_current_project() - local path = node_data.path:sub(#root + 2) - --- If the node type is 'dir', and not in the project test folders (it's means there are no tests in it) - --- then mark it as skipped. 
local log = require('neotest-jdtls.utils.log')
local TestStatus = require('neotest-jdtls.types.enums').TestStatus

local M = {}

---Collects neotest results for a finished run.
---
---`spec.context.report` is a thunk installed by the runner (see
---BaseRunner:run); calling it materializes the parsed results. It is absent
---when build_spec bailed out before launching, in which case every position
---in the tree is marked skipped.
---@param spec neotest.RunSpec
---@param result neotest.StrategyResult
---@param tree neotest.Tree
---@return table<string, table> # neotest results keyed by position id
function M.results(spec, _, tree)
  if not spec.context.report then
    local results = {}
    for _, node in tree:iter_nodes() do
      local node_data = node:data()
      results[node_data.id] = {
        status = TestStatus.Skipped,
        message = 'Report not available',
      }
    end
    return results
  end
  -- Materialize first, then log: inspecting the thunk itself would only
  -- print an opaque function value.
  local results = spec.context.report()
  log.debug('Parsed test results', vim.inspect(results))
  return results
end

return M
local class = require('neotest-jdtls.utils.class')
local log = require('neotest-jdtls.utils.log')

---Abstract base for test-output parsers fed from the runner's TCP stream.
---Subclasses implement on_update() and get_mapped_result().
---@class BaseParser
---@field conn uv_tcp_t # active client connection, set by get_stream_reader
local BaseParser = class()

---Returns a callback suitable for uv_tcp_t:read_start() that feeds each
---received chunk to self:on_update() and closes the connection on EOF/error.
---@param conn uv_tcp_t
---@return fun(err: string|nil, buffer: string|nil) # libuv read callback
function BaseParser:get_stream_reader(conn)
  self.conn = conn
  return vim.schedule_wrap(function(err, buffer)
    if err then
      -- Surface read errors instead of swallowing them silently; a dropped
      -- stream otherwise just looks like a missing report.
      log.error('test runner stream read failed:', err)
      self.conn:close()
      return
    end

    if buffer then
      log.debug('buffer', buffer)
      self:on_update(buffer)
    else
      -- nil buffer signals EOF.
      self.conn:close()
      log.debug('connection closed')
    end
  end)
end

-- luacheck: ignore
---Consumes one chunk of runner output; must be implemented by subclasses.
---@protected
---@param text string
function BaseParser:on_update(text)
  error('Not implemented')
end

---Maps the accumulated output to neotest results; implemented by subclasses.
---@protected
--- @return table
function BaseParser:get_mapped_result()
  error('Not implemented')
end

return BaseParser
local class = require('neotest-jdtls.utils.class')
local log = require('neotest-jdtls.utils.log')
local TestKind = require('neotest-jdtls.types.enums').TestKind
local jdtls = require('neotest-jdtls.utils.jdtls')
local TestLevel = require('neotest-jdtls.types.enums').TestLevel
local nio = require('nio')
local TestContext = require('neotest-jdtls.utils.test_context')

---Template-method base class for framework-specific runners.
---Subclasses implement get_dap_launcher_config() and get_result_parser().
---@class BaseRunner
---@field context TestContext
---@field test_kind TestKind
---@field server uv_tcp_t
local BaseRunner = class()

function BaseRunner:_init()
  self.context = TestContext()
end

---Builds the nvim-dap launch configuration shared by all runners.
---@protected
---@param launch_args JunitLaunchRequestArguments
---@param java_exec string # path to the java executable
---@param config {label: string, debug: boolean}
---@return table # nvim-dap configuration
function BaseRunner.get_base_dap_launcher_config(launch_args, java_exec, config)
  return {
    name = config.label,
    type = 'java',
    request = 'launch',
    mainClass = launch_args.mainClass,
    projectName = launch_args.projectName,
    noDebug = not config.debug,
    javaExec = java_exec,
    cwd = launch_args.workingDirectory,
    classPaths = launch_args.classpath,
    modulePaths = launch_args.modulepath,
    vmArgs = table.concat(launch_args.vmArguments, ' '),
    args = table.concat(launch_args.programArguments, ' '),
    -- env:config?.env,
    -- envFile:config?.envFile,
    -- sourcePaths:config?.sourcePaths,
    -- preLaunchTask:config?.preLaunchTask,
    -- postDebugTask:config?.postDebugTask,
  }
end

---Translates a JDT test-item id (e.g. "project@pkg.Class$Nested#method(...)")
---into a neotest position id ("/path/File.java::Class::Nested::method").
---@param java_test_item JavaTestItem
---@return string
function BaseRunner.test_item_to_neotest_id(java_test_item)
  local nested_class
  local id = java_test_item.id
  local method_name
  if java_test_item.testKind == TestKind.JUnit then
    -- JUnit 4 ids carry no parameter list after the method name.
    method_name = id:match('#(.+)$')
  else
    method_name = id:match('#(.+)%(')
  end

  local class_full_path = id:match('@(.-)%$')
  if class_full_path == nil then
    class_full_path = id:match('@(.+)%#')
    if class_full_path == nil then
      class_full_path = id:match('@(.+)$')
    end
  else
    -- '$' marks a nested class segment.
    nested_class = id:match('%$(.+)%#')
    if nested_class == nil then
      nested_class = id:match('%$(.+)$')
    end
  end

  local class_parts = {}
  for part in class_full_path:gmatch('[^%.]+') do
    table.insert(class_parts, part)
  end
  local class_name = class_parts[#class_parts]

  local uri = java_test_item.uri
  local result = uri .. '::' .. class_name

  if nested_class ~= nil then
    result = result .. '::' .. nested_class
  end

  if method_name ~= nil then
    result = result .. '::' .. method_name
  end
  return vim.uri_to_fname(result)
end

---Depth-first search for the child test item whose neotest id equals `id`.
---@private
---@return JavaTestItem|nil
function BaseRunner.load_lookup(id, java_test_item)
  if not java_test_item.children then
    return nil
  end
  for _, children in ipairs(java_test_item.children) do
    local neotest_id = BaseRunner.test_item_to_neotest_id(children)
    if id == neotest_id then
      return children
    end

    local c = BaseRunner.load_lookup(id, children)
    if c ~= nil then
      return c
    end
  end
end

---Binds the result server and streams accepted connections into the parser.
---@private
---@param parser BaseParser
function BaseRunner:setup(parser)
  self.server:bind('127.0.0.1', 0)
  self.server:listen(128, function(err)
    assert(not err, err)
    local sock = assert(vim.loop.new_tcp(), 'uv.new_tcp must return handle')
    self.server:accept(sock)
    local success = sock:read_start(parser:get_stream_reader(sock))
    assert(success == 0, 'failed to listen to reader')
  end)
end

---@private
---@return JavaTestItem
---@param test_file_uri string
function BaseRunner.get_java_test_item(test_file_uri)
  ---@type JavaTestItem
  local java_test_items = jdtls.find_test_types_and_methods(test_file_uri)
  log.debug('java_test_items', vim.inspect(java_test_items))
  return java_test_items
end

---Resolves launch arguments for a single test method position.
---@private
--- @param test_file_uri string
--- @return JunitLaunchRequestArguments
function BaseRunner:handle_test(data, test_file_uri)
  local java_test_items = self.get_java_test_item(test_file_uri)

  assert(#java_test_items ~= 0, 'No test items found')

  local java_test_item = java_test_items[1]
  local closest_item = self.load_lookup(data.id, java_test_item)

  assert(closest_item, 'No test items found')
  log.debug('closest_item', vim.inspect(closest_item))
  self.context:append_test_item(data.id, closest_item)

  local test_names
  if self.context.test_kind == TestKind.TestNG then
    test_names = { closest_item.fullName }
  else
    test_names = { closest_item.jdtHandler }
  end

  return {
    projectName = closest_item.projectName,
    testLevel = TestLevel.Method,
    testKind = closest_item.testKind,
    testNames = test_names,
  }
end

---Registers every descendant test item of `java_test_item` in the context.
function BaseRunner:find_all_children(java_test_item)
  if not java_test_item.children then
    return nil
  end
  for _, children in ipairs(java_test_item.children) do
    local neotest_id = BaseRunner.test_item_to_neotest_id(children)
    self.context:append_test_item(neotest_id, children)
    self:find_all_children(children)
  end
end

---@private
---@return JavaTestItem|nil
function BaseRunner:base_handle_file(test_file_uri)
  local java_test_items = self.get_java_test_item(test_file_uri)
  if not java_test_items or #java_test_items == 0 then
    log.info('No test items found')
    return nil
  end

  local java_test_item = java_test_items[1]
  self:find_all_children(java_test_item)
  return java_test_item
end

---Resolves launch arguments for a directory position by collecting every
---file/namespace node below it.
--- @return JunitLaunchRequestArguments|nil
function BaseRunner:handle_dir(tree)
  local items = {}
  local project_name = nil
  local test_kind = nil
  for _, node in tree:iter_nodes() do
    local node_data = node:data()
    if node_data.type == 'file' or node_data.type == 'namespace' then
      local uri = vim.uri_from_fname(node_data.path)
      local java_test_item = self:base_handle_file(uri)
      if java_test_item ~= nil then
        table.insert(items, java_test_item.fullName)
        if project_name == nil then
          project_name = java_test_item.projectName
        end
        if test_kind == nil then
          test_kind = java_test_item.testKind
        end
      end
    end
  end

  -- Nothing resolvable below this directory: return nil (callers turn this
  -- into a skipped run) instead of launching with an empty testNames list.
  if #items == 0 then
    log.warn('No test items found in directory')
    return nil
  end

  return {
    projectName = project_name,
    testLevel = TestLevel.Class,
    testKind = test_kind,
    testNames = items,
  }
end

---Resolves launch arguments for a file or namespace position.
--- @param test_file_uri string
--- @return JunitLaunchRequestArguments|nil
function BaseRunner:handle_file(test_file_uri)
  local java_test_item = self:base_handle_file(test_file_uri)
  if java_test_item == nil then
    return nil
  end
  return {
    projectName = java_test_item.projectName,
    testLevel = TestLevel.Class,
    testKind = java_test_item.testKind,
    testNames = { vim.split(java_test_item.fullName, '#')[1] },
  }
end

function BaseRunner:shutdown_server()
  if self.server then
    self.server:shutdown()
    self.server:close()
    log.debug('server closed')
  end
end

---Runs the configuration via nvim-dap and blocks until the session ends.
function BaseRunner:run_test(dap_launcher_config)
  local event = nio.control.event()
  nio.run(function()
    require('dap').run(dap_launcher_config, {
      after = function(_)
        self:shutdown_server()
        event.set()
      end,
    })
  end)
  event.wait()
end

---Dispatches on the neotest position type and asks jdtls for launch arguments.
---@param test_file_uri string
---@return JunitLaunchRequestArguments|nil
function BaseRunner:resolve_junit_launch_arguments(tree, test_file_uri)
  local data = tree:data()
  ---@type JunitLaunchRequestArguments|nil
  local arguments
  if data.type == 'test' then
    log.debug('type: test')
    arguments = self:handle_test(data, test_file_uri)
  elseif data.type == 'dir' then
    log.debug('type: dir')
    arguments = self:handle_dir(tree)
  elseif data.type == 'file' or data.type == 'namespace' then
    log.debug('type: file')
    arguments = self:handle_file(test_file_uri)
  else
    error('Unsupported type: ' .. data.type)
  end
  if not arguments then
    return nil
  end
  self.context.test_kind = arguments.testKind
  self.context.project_name = arguments.projectName
  return jdtls.get_junit_launch_arguments(arguments)
end

-- luacheck: ignore
---Builds the framework-specific dap config; implemented by subclasses.
--- @param launch_arguments JunitLaunchRequestArguments
--- @param is_debug boolean
--- @param executable string
--- @return table
function BaseRunner:get_dap_launcher_config(
  launch_arguments,
  is_debug,
  executable
)
  error('Not implemented')
end

-- luacheck: ignore
---Creates the framework-specific result parser; implemented by subclasses.
-- @return BaseParser
function BaseRunner:get_result_parser()
  error('Not implemented')
end

---Entry point: resolves launch arguments, wires the result parser to a local
---TCP server, runs the tests (or hands the config to dap for debugging) and
---returns the neotest run specification.
---@return neotest.RunSpec
function BaseRunner:run(args)
  local strategy = args.strategy
  local tree = args and args.tree
  local data = tree:data()
  local test_file_uri = vim.uri_from_fname(data.path)

  local launch_arguments =
    self:resolve_junit_launch_arguments(tree, test_file_uri)

  -- No resolvable tests (e.g. a directory without test files): return a bare
  -- spec so results() marks the positions as skipped instead of hard-failing.
  -- This nil-check must precede the testKind assert; asserting first made
  -- this branch unreachable and raised on a nil value.
  if not launch_arguments then
    return {
      context = {
        file = data.path,
        pos_id = data.id,
        type = data.type,
      },
    }
  end
  assert(launch_arguments.testKind ~= TestKind.None, 'Unsupported test kind')
  self.test_kind = launch_arguments.testKind
  log.debug('junit_launch_arguments', vim.inspect(launch_arguments))

  local is_debug = strategy == 'dap'
  local executable = jdtls.resolve_java_executable(
    launch_arguments.mainClass,
    launch_arguments.projectName
  )

  log.debug('test_context', vim.inspect(self.context))
  local parser = assert(self:get_result_parser(), 'parser is nil')
  self.server = assert(vim.loop.new_tcp(), 'uv.new_tcp() must return handle')
  self:setup(parser)

  local dap_launcher_config =
    self:get_dap_launcher_config(launch_arguments, is_debug, executable)

  local config = {}
  if not is_debug then
    -- TODO implement console for non-debug mode
    self:run_test(dap_launcher_config)
  else
    -- In debug mode dap owns the session; shut the server down afterwards.
    dap_launcher_config.after = function()
      nio.run(function()
        self:shutdown_server()
      end)
    end
    config = dap_launcher_config
  end
  return {
    context = {
      -- Thunk consumed by results(): materializes parsed results on demand.
      report = function()
        return parser:get_mapped_result()
      end,
    },
    strategy = config,
  }
end

return BaseRunner
local class = require('neotest-jdtls.utils.class')

---State accumulated while resolving a test run.
---@class TestContext
---@field lookup table<string, {key: string, value: JavaTestItem}> # indexed by the JDT test-item id
---@field project_name string
---@field test_kind TestKind
local TestContext = class()

---Starts with an empty lookup; project_name/test_kind are filled in later
---by the runner once launch arguments are resolved.
function TestContext:_init()
  self.lookup = {}
end

---Records a discovered test item together with its neotest position id.
---NOTE(review): entries are keyed by test_item.id (the JDT id), not by `key`
---(the neotest position id) — confirm consumers expect this indexing.
---@param key string # neotest position id associated with the item
---@param test_item JavaTestItem
function TestContext:append_test_item(key, test_item)
  local entry = { key = key, value = test_item }
  self.lookup[test_item.id] = entry
end

return TestContext
local TestParser = require('neotest-jdtls.junit.result_parser')
local test_name_format = require('tests.utils').test_name_format
local TestKind = require('neotest-jdtls.types.enums').TestKind
local test_case = require('tests.junit.test_case.result_parser_test_case')

-- NOTE: cases are expanded into `it` blocks at describe time. The previous
-- version defined `it` blocks inside a running `it`, which busted does not
-- register, so those cases never executed as individual tests. A fresh
-- parser is also created per case: test_details accumulates across
-- on_update() calls, so asserting test_details[1] is only valid on a
-- newly-constructed parser.
describe('JunitResultParser', function()
  describe('test_kind: [JUnit5]', function()
    for _, case in ipairs(test_case.test_cases_junit5) do
      it(test_name_format(vim.inspect(case.expected), case.input), function()
        local parser = TestParser({
          test_kind = TestKind.JUnit5,
        })
        parser:on_update(case.input)
        assert.are.same(case.expected, parser.test_details[1])
      end)
    end
  end)

  local test_cases_junit = {
    {
      input = '%TSTTREE1,testApp(org.zrgs.maven.CopyOfAppTest),false,1,false,-1,testApp(org.zrgs.maven.CopyOfAppTest),,',
      expected = {
        display_name = 'testApp(org.zrgs.maven.CopyOfAppTest)',
        is_dynamic_test = false,
        is_suite = false,
        parent_id = -1,
        test_count = 1,
        test_id = 1,
        test_name = 'testApp(org.zrgs.maven.CopyOfAppTest)',
        unique_id = 'testApp(org.zrgs.maven.CopyOfAppTest)',
      },
    },
  }

  describe('test_kind: [JUnit4]', function()
    for _, case in ipairs(test_cases_junit) do
      it(test_name_format(vim.inspect(case.expected), case.input), function()
        local parser = TestParser({
          test_kind = TestKind.JUnit,
        })
        parser:on_update(case.input)
        assert.are.same(case.expected, parser.test_details[1])
      end)
    end
  end)

  describe('get_test_id_for_junit5_method:', function()
    local project_name = 'spring-petclinic'
    local cases = test_case.get_test_id_for_junit_5method(project_name)
    for _, case in ipairs(cases) do
      it(test_name_format(case.expected, case.input), function()
        local result =
          TestParser.get_test_id_for_junit5_method(project_name, case.input)
        assert.equals(case.expected, result)
      end)
    end
  end)

  describe('get_junit5_method_name:', function()
    for _, case in ipairs(test_case.get_junit5_method_name) do
      it(test_name_format(case.expected, case.input), function()
        local result = TestParser.get_junit5_method_name(case.input)
        assert.equals(case.expected, result)
      end)
    end
  end)
end)
local M = {}

---Builds one { input, expected } pair.
local function case(input, expected)
  return { input = input, expected = expected }
end

-- Raw %TSTTREE tree messages as emitted by the JUnit 5 runner, paired with
-- the tree entry the parser is expected to produce.
M.test_cases_junit5 = {
  case(
    '%TSTTREE18,provideArguments(dev.sirosh.case_folders.CaseFolderSourceArgumentsProviderTest),false,1,true,3,two param test,java.lang.reflect.Method\\, java.util.List,[engine:junit-jupiter]/[class:dev.sirosh.case_folders.CaseFolderSourceArgumentsProviderTest]/[test-template:provideArguments(java.lang.reflect.Method\\, java.util.List)]/[test-template-invocation:#15]',
    {
      display_name = 'two param test',
      is_dynamic_test = true,
      is_suite = false,
      parameter_types = 'java.lang.reflect.Method\\, java.util.List',
      parent_id = 3,
      test_count = 1,
      test_id = 18,
      test_name = 'provideArguments(dev.sirosh.case_folders.CaseFolderSourceArgumentsProviderTest)',
      unique_id = '[engine:junit-jupiter]/[class:dev.sirosh.case_folders.CaseFolderSourceArgumentsProviderTest]/[test-template:provideArguments(java.lang.reflect.Method\\, java.util.List)]/[test-template-invocation:#15]',
    }
  ),
}

---JUnit 5 unique-id strings paired with the expected JDT test id for
---`project_name`.
function M.get_test_id_for_junit_5method(project_name)
  return {
    case(
      '[engine:junit-jupiter]/[class:org.springframework.samples.petclinic.owner.PetControllerTests]/[nested-class:ProcessCreationFormHasErrors]/[method:testProcessCreationFormWithBlankName()]',
      project_name
        .. '@org.springframework.samples.petclinic.owner.PetControllerTests$ProcessCreationFormHasErrors#testProcessCreationFormWithBlankName()'
    ),
    case(
      '[engine:junit-jupiter]/[class:org.springframework.samples.petclinic.owner.PetControllerTests]/[method:testProcessCreationFormSuccess()]',
      project_name
        .. '@org.springframework.samples.petclinic.owner.PetControllerTests#testProcessCreationFormSuccess()'
    ),
  }
end

-- Simple java.lang parameter lists.
M.get_junit5_method_name = {
  case(
    'methodName(java.lang.String,java.lang.String)',
    'methodName(String,String)'
  ),
  case(
    'methodName(java.lang.String, java.lang.String,java.lang.Long)',
    'methodName(String,String,Long)'
  ),
}

-- Boxed array parameters: %5BLjava.lang.<Class>; -> <Class>[].
-- Order matters: consumers iterate this table with ipairs.
local boxed_array_types = {
  'Byte',
  'String',
  'Integer',
  'Long',
  'Float',
  'Double',
  'Short',
  'Character',
  'Boolean',
}
for _, boxed in ipairs(boxed_array_types) do
  table.insert(
    M.get_junit5_method_name,
    case(
      'methodName(%5BLjava.lang.' .. boxed .. ';)',
      'methodName(' .. boxed .. '[])'
    )
  )
end

-- Primitive array parameters: %5B<JVM type code> -> <primitive>[].
local primitive_array_types = {
  { 'Z', 'boolean' },
  { 'B', 'byte' },
  { 'S', 'short' },
  { 'I', 'int' },
  { 'J', 'long' },
  { 'F', 'float' },
  { 'D', 'double' },
  { 'C', 'char' },
}
for _, p in ipairs(primitive_array_types) do
  table.insert(
    M.get_junit5_method_name,
    case('methodName(%5B' .. p[1] .. ')', 'methodName(' .. p[2] .. '[])')
  )
end

-- Multiple and mixed parameter lists.
table.insert(
  M.get_junit5_method_name,
  case('methodName(%5BF, %5BD)', 'methodName(float[],double[])')
)
table.insert(
  M.get_junit5_method_name,
  case(
    'methodName(java.lang.Integer, %5BLjava.lang.Short;, %5BZ, java.lang.String)',
    'methodName(Integer,Short[],boolean[],String)'
  )
)

return M
'PetControllerTests.java::PetControllerTests::ProcessCreationFormHasErrors::testProcessCreationFormWithBlankName', + }, + { + input = { + id = 'spring-petclinic@org.springframework.samples.petclinic.owner.PetControllerTests$ProcessCreationFormHasErrors#testProcessCreationFormWithInvalidBirthDate()', + uri = mock_uri, + }, + expected = path + .. 'PetControllerTests.java::PetControllerTests::ProcessCreationFormHasErrors::testProcessCreationFormWithInvalidBirthDate', + }, + { + input = { + id = 'spring-petclinic@org.springframework.samples.petclinic.owner.PetControllerTests$ProcessCreationFormHasErrors#testProcessCreationFormWithMissingPetType()', + uri = mock_uri, + }, + expected = path + .. 'PetControllerTests.java::PetControllerTests::ProcessCreationFormHasErrors::testProcessCreationFormWithMissingPetType', + }, + { + input = { + id = 'spring-petclinic@org.springframework.samples.petclinic.owner.PetControllerTests$ProcessCreationFormHasErrors', + uri = mock_uri, + }, + expected = path + .. 'PetControllerTests.java::PetControllerTests::ProcessCreationFormHasErrors', + }, + { + input = { + id = 'spring-petclinic@org.springframework.samples.petclinic.owner.PetControllerTests$ProcessUpdateFormHasErrors#testProcessUpdateFormWithBlankName()', + uri = mock_uri, + }, + expected = path + .. 'PetControllerTests.java::PetControllerTests::ProcessUpdateFormHasErrors::testProcessUpdateFormWithBlankName', + }, + { + input = { + id = 'spring-petclinic@org.springframework.samples.petclinic.owner.PetControllerTests$ProcessUpdateFormHasErrors#testProcessUpdateFormWithInvalidBirthDate()', + uri = mock_uri, + }, + expected = path + .. 'PetControllerTests.java::PetControllerTests::ProcessUpdateFormHasErrors::testProcessUpdateFormWithInvalidBirthDate', + }, + { + input = { + id = 'spring-petclinic@org.springframework.samples.petclinic.owner.PetControllerTests$ProcessUpdateFormHasErrors', + uri = mock_uri, + }, + expected = path + .. 
'PetControllerTests.java::PetControllerTests::ProcessUpdateFormHasErrors', + }, + { + input = { + id = 'spring-petclinic@org.springframework.samples.petclinic.owner.PetControllerTests', + uri = mock_uri, + }, + expected = path .. 'PetControllerTests.java::PetControllerTests', + }, + { + input = { + id = 'spring-petclinic@com.mock.PetControllerTests#testApp', + uri = mock_uri, + testKind = 1, + }, + expected = path .. 'PetControllerTests.java::PetControllerTests::testApp', + }, + } +end + +return M diff --git a/tests/minimal_init.lua b/tests/minimal_init.lua new file mode 100644 index 0000000..494a92a --- /dev/null +++ b/tests/minimal_init.lua @@ -0,0 +1,12 @@ +local lazypath = vim.fn.stdpath('data') .. '/lazy' +vim.notify = print +vim.opt.rtp:append('.') +vim.opt.rtp:append(lazypath .. '/plenary.nvim') +vim.opt.rtp:append(lazypath .. '/nvim-nio') +vim.opt.rtp:append(lazypath .. '/neotest') + +vim.opt.swapfile = false +vim.cmd('runtime! plugin/plenary.vim') +A = function(...) + print(vim.inspect(...)) +end diff --git a/tests/utils.lua b/tests/utils.lua new file mode 100644 index 0000000..ee082ee --- /dev/null +++ b/tests/utils.lua @@ -0,0 +1,7 @@ +local M = {} + +function M.test_name_format(expected, input) + return string.format('\nexpected: %s\ninput %s\n', expected, input) +end + +return M