scraper
commit 5b32f807b1 (parent 3056d48d5a)
@@ -1,4 +1,6 @@
 local curl = require "cURL"
+local inspect = require "inspect"
+local log = require "logger"

 local crawler = {}

@@ -17,7 +19,7 @@ function crawler.request(url)
   local code, body = easy:getinfo_response_code(), table.concat(queue)
   easy:close()

-  if code ~= 200 then
+  if code < 200 or code >= 300 then
     error {
-      message = "curl response code is not 200",
+      message = "curl response code is not 2xx",
       code = code,

@@ -28,4 +30,17 @@ function crawler.request(url)
   return queue
 end

+function crawler.fetch(url)
+  local success, result = pcall(crawler.request, url)
+
+  if not success then
+    log:error(inspect { "Fetch failed", status = success, error = result })
+    return
+  end
+
+  log:info(inspect { message = "Successfully fetched resource", url = url })
+
+  return table.concat(result, "")
+end
+
 return crawler
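A quick usage sketch for the new crawler.fetch wrapper (a minimal sketch, assuming the modules above are on package.path; the URL is the upstream one from properties.lua):

local crawler = require "crawler"

-- crawler.fetch pcalls crawler.request, logs any failure, and returns the
-- whole response body as one string (nil when the request failed)
local body = crawler.fetch "https://awesomewm.org/apidoc/index.html"
if body then
  print(("fetched %d bytes"):format(#body))
end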
@@ -1,19 +1,31 @@
+local crawler = require "crawler"
 local inspect = require "inspect"
 local log = require "logger"
 local properties = require "properties"
-local utils = require "utils"
+local scraper = require "scraper"

 log:info(
   inspect { message = "Start extraction", base_url = properties.base_url }
 )

-local index = utils.fetch(properties.base_url .. properties.index_uri)
-local modules = utils.get_modules_from_index(index, properties.ignored_modules)
+-- local index = crawler.fetch(properties.base_url .. properties.index_uri)
+-- local modules = scraper.get_modules_from_index(
+--   index,
+--   properties.ignored_modules
+-- )

-log:info(inspect { modules_found = #modules })
+-- log:info(inspect { modules_found = #modules })

-local m = modules[1]
-log:info(inspect { try = m })
-local page = utils.fetch(properties.base_url .. "/" .. m.uri)
-local items = utils.get_items_from_page(page)
+-- for i = 1, 1 do -- #modules do
+--   local m = modules[i]
+--   log:info(inspect { try = m })
+--   local page = crawler.fetch(properties.base_url .. "/" .. m.uri)
+--   local items = scraper.get_doc_from_page(page)
+--   log:info(inspect { items })
+-- end
+
+local page = crawler.fetch(
+  properties.base_url .. "/widgets/awful.widget.button.html"
+)
+local items = scraper.get_doc_from_page(page)
+log:info(inspect { items })
@@ -1,6 +1,7 @@
 local properties = {}

-properties.base_url = "https://awesomewm.org/apidoc"
+-- properties.base_url = "https://awesomewm.org/apidoc"
+properties.base_url = "file:///usr/share/doc/awesome/doc"

 properties.index_uri = "/index.html"
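Since base_url now points at the local filesystem, a cheap sanity check can catch a missing docs package before any curl call (a sketch; it assumes the file:// prefix maps straight onto a plain path):

local properties = require "properties"

-- strip the scheme and probe the index file directly
local path = properties.base_url:gsub("^file://", "") .. properties.index_uri
local f = assert(io.open(path, "r"), "local API docs not found at " .. path)
f:close()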
@@ -1,18 +1,209 @@
 local htmlparser = require "htmlparser"
+local log = require "logger"
+local utils = require "utils"

 local scraper = {}

-function scraper.extract_nodes(document, selector, extractor)
-  local nodes = document(selector)
-  local extracts = {}
-
-  for _, node in ipairs(nodes) do
-    local data = extractor(node)
-
-    if data then
-      table.insert(extracts, data)
-    end
-  end
-
-  return extracts
-end
+function scraper.get_modules_from_index(html, ignored)
+  local document = htmlparser.parse(html)
+
+  local modules = utils.map(document "#navigation ul > li a", function(node)
+    return {
+      name = utils.sanitize_string(node:getcontent()),
+      uri = node.attributes.href,
+    }
+  end)
+
+  local filtered_modules = utils.filter(modules, function(module)
+    return not utils.has_item(ignored, module.name)
+  end)
+
+  return filtered_modules
+end
+
+local function extract_first_node_or_fail(node, selector)
+  local extracted = node(selector)[1]
+
+  if not extracted then
+    log:error {
+      message = "Can't find `" .. selector .. "` element!",
+      node = node:gettext(),
+    }
+    error "extract_first_node_or_fail"
+  end
+
+  return extracted
+end
+
+local function extract_first_node(node, selector)
+  return node(selector)[1]
+end
+
+local function extract_inner_floating_text(node)
+  local html = node:getcontent()
+
+  -- Remove inner tags from the html, keeping only the node's own text
+  for _, n in ipairs(node.nodes) do
+    html = utils.replace(html, n:gettext(), "")
+  end
+
+  return utils.sanitize_string(html)
+end
+
+local function extract_item_content_parameter_list(content)
+  for i, n in ipairs(content.nodes) do
+    -- The parameters <ul> is the next element after one of these <h3>
+    if
+      n.name == "h3"
+      and utils.has_item(
+        { "Parameters:", "Type constraints:", "Arguments" },
+        utils.sanitize_string(n:getcontent())
+      )
+    then
+      return content.nodes[i + 1]
+    end
+  end
+
+  return nil
+end
+
+local function process_item_header_type(node)
+  local types_node = extract_first_node(node, "span.summary_type")
+
+  if not types_node then
+    return nil
+  end
+
+  local types_string = string.match(types_node:getcontent(), "%((.-)%)")
+    or types_node:getcontent()
+  -- turn the word "or" into a comma; the frontier pattern avoids touching
+  -- "or" inside a type name such as "color"
+  types_string = types_string:gsub("%f[%a]or%f[%A]", ",")
+  local splitted = string.gmatch(types_string, "([^,]+)")
+
+  local types = {}
+  for t in splitted do
+    table.insert(types, utils.sanitize_string(t))
+  end
+
+  return types
+end
+
+local function process_item_header(node)
+  local name = extract_first_node_or_fail(node, "strong")
+  local type = process_item_header_type(node)
+
+  local item_header = {
+    name = extract_inner_floating_text(name),
+    type = type,
+  }
+
+  return item_header
+end
+
+local function process_item_content_parameters(node)
+  local parameter = {}
+
+  local name = extract_first_node_or_fail(node, "span.parameter")
+  parameter.name = utils.sanitize_string(name:getcontent())
+
+  local nested_parameters_list = extract_first_node(node, "ul")
+  if nested_parameters_list then
+    parameter.type = "table"
+    parameter.nested_parameters = utils.map(
+      nested_parameters_list "* > li",
+      process_item_content_parameters
+    )
+
+    return parameter
+  end
+
+  local types_list = utils.map(node ".type", function(type)
+    return utils.sanitize_string(type:getcontent())
+  end)
+  if #types_list > 0 then
+    parameter.type = types_list
+  end
+
+  return parameter
+end
+
+local function process_item_content(content)
+  local parameters_list = extract_item_content_parameter_list(content)
+  local parameters = parameters_list
+      and utils.map(parameters_list "* > li", process_item_content_parameters)
+    or {}
+
+  local item_content = {
+    parameters = #parameters > 0 and parameters or nil,
+  }
+
+  return item_content
+end
+
+local function process_section_titles(document)
+  return utils.map(document "h2.section-header", extract_inner_floating_text)
+end
+
+local function process_section_items(item)
+  local headers = utils.map(item "dt", process_item_header)
+  local contents = utils.map(item "dd", process_item_content)
+
+  if #headers ~= #contents then
+    log:error {
+      message = "process_section_items failure: headers and contents don't have the same size",
+      headers = #headers,
+      contents = #contents,
+    }
+    error "process_section_items"
+  end
+
+  local item_contents = utils.map(headers, function(header, i)
+    return {
+      name = header.name,
+      type = header.type,
+      parameters = contents[i].parameters,
+    }
+  end)
+
+  return item_contents
+end
+
+local function process_section_contents(document)
+  local section_items = document "dl.function"
+
+  local section_contents = utils.map(section_items, process_section_items)
+
+  return section_contents
+end
+
+function scraper.get_doc_from_page(html)
+  local document = htmlparser.parse(html, 9999)
+
+  local section_titles = process_section_titles(document)
+  local section_contents = process_section_contents(document)
+
+  if #section_titles ~= #section_contents then
+    log:error {
+      message = "get_doc_from_page failure: section_titles and section_contents don't have the same size",
+      section_titles = #section_titles,
+      section_contents = #section_contents,
+    }
+    error "get_doc_from_page"
+  end
+
+  local doc = utils.map(section_titles, function(title, i)
+    return { sections = title, items = section_contents[i] }
+  end)
+
+  return doc
+end

 return scraper
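For reference, scraper.get_doc_from_page pairs each h2.section-header title with the dl.function block parsed at the same index, so its result looks roughly like the sketch below (values are illustrative; the field names come from the mapping code above):

local example_doc = {
  {
    sections = "Constructors", -- title text of an h2.section-header
    items = {
      {
        name = "awful.widget.button", -- from the <strong> inside a <dt>
        type = nil, -- filled from span.summary_type, e.g. "(string or table)" -> { "string", "table" }
        parameters = { -- nil when the item declares none
          { name = "args", type = "table", nested_parameters = {} },
        },
      },
    },
  },
}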
@@ -1,8 +1,3 @@
-local crawler = require "crawler"
-local htmlparser = require "htmlparser"
-local inspect = require "inspect"
-local log = require "logger"
-local scraper = require "scraper"
 local web_sanitize = require "web_sanitize"

 local utils = {}

@@ -17,70 +12,47 @@ function utils.has_item(table, item)
   return nil
 end

-function utils.sanitize_page_name(string)
-  return (web_sanitize.extract_text(string):gsub("^%s*(.-)%s*$", "%1"))
-end
-
-function utils.fetch(url)
-  local success, result = pcall(crawler.request, url)
-
-  if not success then
-    log:error(inspect { "fetch failed", status = success, error = result })
-    return
-  end
-
-  log:info(inspect { message = "successfully fetched resource", url = url })
-
-  return table.concat(result, "")
-end
+function utils.filter(list, predicate)
+  local filtered = {}
+
+  for position, value in ipairs(list) do
+    if predicate(value, position) then
+      table.insert(filtered, value)
+    end
+  end
+
+  return filtered
+end

-function utils.get_modules_from_index(html, ignored)
-  local document = htmlparser.parse(html)
-
-  local modules = scraper.extract_nodes(
-    document,
-    "#navigation ul > li a",
-    function(node)
-      if node.name ~= "a" then
-        return nil
-      end
-
-      local name = utils.sanitize_page_name(node:getcontent())
-
-      if utils.has_item(ignored, name) then
-        return nil
-      end
-
-      local module = {
-        name = name,
-        uri = node.attributes.href,
-      }
-
-      return module
-    end
-  )
-
-  return modules
-end
+function utils.map(list, iteratee)
+  local mapped = {}
+
+  for position, value in ipairs(list) do
+    table.insert(mapped, iteratee(value, position))
+  end
+
+  return mapped
+end

-function utils.get_items_from_page(html)
-  local document = htmlparser.parse(html, 9999)
-
-  local titles = scraper.extract_nodes(
-    document,
-    "h2.section-header",
-    function(node)
-      return {
-        name = node.name,
-      }
-    end
-  )
-
-  local items = scraper.extract_nodes(document, "dl.function", function(node)
-    return { name = node.name }
-  end)
-
-  return { titles, items }
-end
+function utils.sanitize_string(string)
+  return utils.trim(web_sanitize.extract_text(string))
+end
+
+-- Extracted from the Penlight Lua library.
+-- Sometimes Lua's string.gsub can't match unescaped strings.
+-- https://stackoverflow.com/a/72666170
+function utils.escape(string)
+  return (string:gsub("[%-%.%+%[%]%(%)%$%^%%%?%*]", "%%%1"))
+end
+
+function utils.replace(string, old, new, n)
+  return (string:gsub(utils.escape(old), new:gsub("%%", "%%%%"), n))
+end
+
+function utils.trim(string)
+  return string:match "^%s*(.-)%s*$"
+end

 return utils
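The new list helpers pass (value, position) to their callbacks, and utils.replace does plain-text substitution (the needle is pattern-escaped first); a few expected behaviours, as a sketch:

local utils = require "utils"

local doubled = utils.map({ 1, 2, 3 }, function(v) return v * 2 end) -- { 2, 4, 6 }
local evens = utils.filter({ 1, 2, 3, 4 }, function(v) return v % 2 == 0 end) -- { 2, 4 }

print(utils.replace("a.b.c", ".", "/")) -- a/b/c (a raw gsub with "." would match every character)
print(utils.trim "  padded  ") -- padded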