Fix module name #15

```diff
@@ -1,4 +1,4 @@
-local Function_Info = require "entities.Function_Info"
+local Function_Info = require "entity.Function_Info"
 local List = require "pl.List"
 
 local record Module_Doc
```
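
The rename ripples through every `require` call because Lua resolves the dotted module name against the filesystem. A minimal sketch of the lookup, assuming the renamed `entity/` directory is on `package.path` (if it is not, `searchpath` returns nil plus the paths it tried):

```lua
-- require replaces the dot with a directory separator and searches
-- package.path, so "entity.Function_Info" resolves to something like
-- "./entity/Function_Info.lua"; the old "entities.*" names no longer match.
print(package.searchpath("entity.Function_Info", package.path))
```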

```diff
@@ -1,4 +1,4 @@
-local Function_Info = require "entities.Function_Info"
+local Function_Info = require "entity.Function_Info"
 local List = require "pl.List"
 local utils = require "utils"
 local template = require "pl.template"
```
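
This file also pulls in Penlight's template engine. For context, a minimal sketch of the documented `pl.template` API; the template string is only an illustration, not one from this repo:

```lua
local template = require "pl.template"

-- substitute expands $(expr) placeholders against the given table and
-- returns the rendered string, or nil plus an error message.
local out, err = template.substitute("Hello, $(name)!", { name = "world" })
print(out or err) -- Hello, world!
```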

```diff
@@ -1,4 +1,4 @@
-local Module_Doc = require "entities.Module_Doc"
+local Module_Doc = require "entity.Module_Doc"
 local template = require "pl.template"
 local utils = require "utils"
 local snippets = require "generator.snippets"
```

```diff
@@ -1,31 +1,31 @@
 local crawler = require "crawler"
 local filesystem = require "filesystem"
 local logger = require "logger"
-local properties = require "properties"
+local property = require "property"
 local scraper = require "scraper"
 local generator = require "generator"
 
 local log = logger.log("main")
 
-log:info(logger.message_with_metadata("Start", { properties = properties }))
+log:info(logger.message_with_metadata("Start", { property = property }))
 
-local index = crawler.fetch(properties.base_url .. properties.index_uri)
+local index = crawler.fetch(property.base_url .. property.index_uri)
 
 -- local modules =
---     scraper.get_modules_from_index(index, properties.ignored_modules)
+--     scraper.get_modules_from_index(index, property.ignored_modules)
 local module_infos = scraper.module_info_list.get_modules_from_index(index)
 
 log:info("Finished Module List scrapping, found " .. #module_infos .. " modules")
 -- for i = 1, 1 do -- #modules do
 --     local m = modules[i]
 --     log:info(inspect { try = m })
---     local page = crawler.fetch(properties.base_url .. "/" .. m.uri)
+--     local page = crawler.fetch(property.base_url .. "/" .. m.uri)
 --     local items = scraper.get_doc_from_page(page)
 --     log:info(inspect { items })
 -- end
 
 local html =
-    crawler.fetch(properties.base_url .. "/widgets/wibox.widget.textbox.html")
+    crawler.fetch(property.base_url .. "/widgets/wibox.widget.textbox.html")
 local module_doc = scraper.module_doc.get_doc_from_page(html)
 -- log:info(inspect { module_doc = module_doc })
 
@@ -35,5 +35,5 @@ local module_doc = scraper.module_doc.get_doc_from_page(html)
 local mod = "textbox"
 filesystem.file_writer.write(
     generator.teal_type_definitions.generate_teal(mod, module_doc),
-    properties.out_directory .. "/" .. mod .. ".d.tl"
+    property.out_directory .. "/" .. mod .. ".d.tl"
 )
```
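
Taken together, the main script wires a fetch → scrape → generate pipeline. A condensed sketch of the data flow, using only calls that appear in this diff (error handling omitted):

```lua
local crawler = require "crawler"
local scraper = require "scraper"
local generator = require "generator"
local filesystem = require "filesystem"
local property = require "property"

-- Fetch one documentation page, scrape it into a module doc, and emit
-- a Teal type-definition file into the configured output directory.
local page = crawler.fetch(property.base_url .. "/widgets/wibox.widget.textbox.html")
local doc = scraper.module_doc.get_doc_from_page(page)
filesystem.file_writer.write(
    generator.teal_type_definitions.generate_teal("textbox", doc),
    property.out_directory .. "/textbox.d.tl"
)
```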

```diff
@@ -1,4 +1,4 @@
-local record Properties
+local record Property
     base_url: string
     index_uri: string
 
@@ -11,7 +11,7 @@ local record Properties
     ignored_modules: { string }
 end
 
-local properties: Properties = {
+local property: Property = {
     -- base_url = "https://awesomewm.org/apidoc",
     base_url = "file:///usr/share/doc/awesome/doc",
     index_uri = "/index.html",
@@ -69,4 +69,4 @@ local properties: Properties = {
     }
 }
 
-return properties
+return property
```
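
The record instance is returned as a plain table, so consumers simply require it and read the typed fields. A minimal usage sketch, assuming the compiled module is on the load path under its new name:

```lua
local property = require "property"

-- With the defaults above this builds
-- "file:///usr/share/doc/awesome/doc/index.html".
print(property.base_url .. property.index_uri)
```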

```diff
@@ -1,7 +1,7 @@
-local Function_Info = require "entities.Function_Info"
+local Function_Info = require "entity.Function_Info"
 local List = require "pl.List"
 local logger = require "logger"
-local Module_Doc = require "entities.Module_Doc"
+local Module_Doc = require "entity.Module_Doc"
 local scan = require "web_sanitize.query.scan_html"
 local scraper_utils = require "scraper.utils"
 local utils = require "utils"
```

```diff
@@ -1,4 +1,4 @@
-local Module_Info = require "entities.Module_Info"
+local Module_Info = require "entity.Module_Info"
 local scan = require "web_sanitize.query.scan_html"
 local scraper_utils = require "scraper.utils"
 local utils = require "utils"
```
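
Both scraper files build on `web_sanitize`'s HTML scanner. A minimal sketch based on that library's documented callback API, with a throwaway snippet as input (the exact node methods here are an assumption from upstream docs, not from this repo):

```lua
local scan_html = require "web_sanitize.query.scan_html"

-- The callback fires for each element as the scanner walks the HTML;
-- stack:is matches a CSS-like selector against the current node.
scan_html([[<div class="doc">hello</div>]], function(stack)
    if stack:is("div.doc") then
        print(stack:current():inner_html()) -- hello
    end
end)
```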