run: make the program run on the whole doc site
parent b423280215
commit fcad0b33f9
@@ -1,6 +1,8 @@
 local crawler = require "crawler"
 local filesystem = require "filesystem"
+local List = require "pl.List"
 local logger = require "logger"
+local Module_Info = require "entity.Module_Info"
 local property = require "property"
 local scraper = require "scraper"
 local generator = require "generator"
@@ -11,48 +13,28 @@ log:info(logger.message_with_metadata("Start", { property = property }))
 
 local index = crawler.fetch(property.base_url .. property.index_uri)
 
--- local modules =
--- scraper.get_modules_from_index(index, property.ignored_modules)
-local module_infos = scraper.module_info_list.get_modules_from_index(index)
+local ignored_modules = List(property.ignored_modules)
+local module_infos = List(scraper.module_info_list.get_modules_from_index(index)):filter(
+    function(module: Module_Info.Module_Info): boolean
+        return not ignored_modules:contains(module.name)
+    end
+)
 
 log:info("Finished Module List scraping, found " .. #module_infos .. " modules")
--- for i = 1, 1 do -- #modules do
--- local m = modules[i]
--- log:info(inspect { try = m })
--- local page = crawler.fetch(property.base_url .. "/" .. m.uri)
--- local items = scraper.get_doc_from_page(page)
--- log:info(inspect { items })
--- end
 
-local function do_one_file(url: string, module_name: string, output: string)
+local function do_one_file(url: string, output_base_dir: string)
+    local module_name = url:gsub(".*/", ""):gsub(".html", "")
     local html = crawler.fetch(url)
     local module_doc = scraper.module_doc.get_doc_from_page(html, module_name)
     filesystem.file_writer.write(
         generator.teal_type_definitions.generate_teal(module_doc),
-        output
+        output_base_dir .. module_name:gsub("%.", "/") .. ".d.tl"
     )
 end
 
-do_one_file(
-    property.base_url .. "/widgets/wibox.widget.textbox.html",
-    "wibox.widget.textbox",
-    property.out_directory .. "/textbox.d.tl"
-)
-
-do_one_file(
-    property.base_url .. "/popups_and_bars/wibox.html",
-    "wibox",
-    property.out_directory .. "/wibox.d.tl"
-)
-
-do_one_file(
-    property.base_url .. "/widget_layouts/wibox.layout.fixed.html",
-    "wibox.layout.fixed",
-    property.out_directory .. "/fixed.d.tl"
-)
-
-do_one_file(
-    property.base_url .. "/core_components/client.html",
-    "client",
-    property.out_directory .. "/client.d.tl"
-)
+for i = 1, #module_infos do
+    do_one_file(
+        property.base_url .. "/" .. module_infos[i].uri,
+        property.out_directory .. "/"
+    )
+end
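A note on the new filtering step: it relies on Penlight's pl.List, whose filter and contains methods drive the ignored-module check. Below is a minimal standalone sketch of the same pattern in plain Lua; the module names are made up for illustration, and the real values come from property.ignored_modules and the scraped index.

local List = require "pl.List"

-- Hypothetical stand-ins for property.ignored_modules and the scraped index.
local ignored_modules = List { "gears.debug" }
local module_infos = List({
    { name = "gears.debug" },
    { name = "wibox.widget.textbox" },
}):filter(function(module)
    return not ignored_modules:contains(module.name)
end)

print(#module_infos) -- 1: only wibox.widget.textbox survives the filter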
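The rewritten do_one_file also derives both the module name and the output path from the documentation URL itself. A small sketch of that string manipulation, using a hypothetical URL of the same shape as the ones built from property.base_url and module_infos[i].uri:

-- Hypothetical example URL; the real ones are property.base_url .. "/" .. module_infos[i].uri.
local url = "https://example.org/apidoc/widgets/wibox.widget.textbox.html"
local module_name = url:gsub(".*/", ""):gsub(".html", "")
print(module_name)                            -- wibox.widget.textbox
print(module_name:gsub("%.", "/") .. ".d.tl") -- wibox/widget/textbox.d.tl

Note that the dot is unescaped in the ".html" pattern, so it matches any character before "html"; that is harmless for URLs of this shape, but "%.html" would be the stricter pattern.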