Merge pull request 'Implement function parameters scraping (#19)' (#25) from feat/#19 into master
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details

Reviewed-on: #25
This commit is contained in:
Aire-One 2022-10-30 17:08:24 +01:00
commit 697e5d1a27
3 changed files with 35 additions and 5 deletions

View File

@@ -6,7 +6,7 @@ local record Function_Info
Function_Info: Function_Info
record Parameter
name: string
type: string
types: List<string>
end
name: string
@@ -27,10 +27,10 @@ local __Function_Info: metatable&lt;Function_Info&gt; = {
end,
}
function Function_Info:append_parameter(name: string, type: string)
function Function_Info:append_parameter(name: string, types: List<string>)
self.parameters:append {
name = name,
type = type,
types = types,
}
end

View File

@@ -30,7 +30,7 @@ function snippets.render_anonymous_function_signature(item: Function_Info.Functi
local tmpl_args = {
function_name = item.name,
function_parameter = item.parameters:map(function(param: Function_Info.Parameter): string
return snippets.render_typed_variable(param.name, List({param.type})) -- TODO : add support for multiple types
return snippets.render_typed_variable(param.name, param.types)
end):concat(", "),
function_return = item.return_types:concat(", "),
}

View File

@@ -4,6 +4,7 @@ local logger = require "logger"
local Module_Doc = require "entity.Module_Doc"
local scan = require "web_sanitize.query.scan_html"
local scraper_utils = require "scraper.utils"
local stringx = require "pl.stringx"
local utils = require "utils"
local log = logger.log("scraper")
@@ -12,10 +13,36 @@ local function extract_node_text(node: scan.HTMLNode): string
return utils.sanitize_string(node:inner_text())
end
-- Split an LDoc type annotation (e.g. "string or number") into the list of
-- individual type names, sanitizing each entry.
-- An empty annotation means the parameter is untyped; represent that as
-- the single pseudo-type "any".
local function parse_parameter_types(parameter_type: string): List<string>
   if parameter_type ~= "" then
      return stringx.split(parameter_type, " or "):map(utils.sanitize_string)
   end
   return List({ "any" })
end
-- Derive the bare function name from the anchor node's "name" attribute,
-- stripping any "Class:" prefix (everything up to the last colon).
-- Returns nil when no anchor node was matched.
local function extract_function_name(function_name_node: scan.HTMLNode): string
   if not function_name_node then
      return nil
   end
   local anchor_name = function_name_node.attr.name as string
   -- Extra parentheses discard gsub's second return value (the match count).
   return (anchor_name:gsub(".*:", ""))
end
-- Scrape a function's documented parameters from its <dd> parameter-table
-- node, pairing each "span.parameter" (name) with its "span.types"
-- annotation (parsed into a list of type names).
-- Returns an empty list when the node is absent — e.g. a function documented
-- without a parameter table — mirroring the nil guard used by
-- extract_function_return_types. Without this guard, calling :outer_html()
-- on nil would crash the scraper.
local function extract_function_parameters(function_parameters_node: scan.HTMLNode): { Function_Info.Parameter }
   if not function_parameters_node then
      return {}
   end

   local query_selectors = {
      name = "span.parameter",
      types = "span.types"
   }

   return scraper_utils.scrape_tuples(
      function_parameters_node:outer_html(),
      { query_selectors.name, query_selectors.types },
      function(nodes: { string : scan.HTMLNode | nil }): Function_Info.Parameter
         return {
            name = extract_node_text(nodes[query_selectors.name]),
            types = parse_parameter_types(extract_node_text(nodes[query_selectors.types])),
         }
      end
   )
end
local function extract_function_return_types(function_return_types_node: scan.HTMLNode): { string }
if not function_return_types_node then
return {}
@@ -30,17 +57,20 @@ end
local function extract_section_functions(dl: string): { Function_Info.Function_Info }
local query_selectors = {
function_name = "dt a",
function_parameters = "dd table",
function_return_type = "dd ol",
}
return scraper_utils.scrape_tuples(
dl,
{ query_selectors.function_name, query_selectors.function_return_type },
{ query_selectors.function_name, query_selectors.function_parameters, query_selectors.function_return_type },
function(nodes: { string : scan.HTMLNode | nil }): Function_Info.Function_Info
local function_info = Function_Info()
function_info.name =
extract_function_name(nodes[query_selectors.function_name])
function_info.parameters =
List(extract_function_parameters(nodes[query_selectors.function_parameters]))
function_info.return_types = List(
extract_function_return_types(
nodes[query_selectors.function_return_type]