version bump; underscore escaping fix for backticks; horrible workaround for deficiencies of C lexer
parent 2fbe566039
commit c516eebd12
ldoc.lua
@@ -37,7 +37,7 @@ app.require_here()
 --- @usage
 local usage = [[
-ldoc, a documentation generator for Lua, vs 1.4.2
+ldoc, a documentation generator for Lua, vs 1.4.3
 -d,--dir (default doc) output directory
 -o,--output (default 'index') output name
 -v,--verbose verbose
@@ -259,7 +259,7 @@ function CC.lexer(f)
    local err
    f,err = utils.readfile(f)
    if not f then quit(err) end
-   return lexer.cpp(f,{})
+   return lexer.cpp(f,{},nil,true)
 end

 function CC:grab_block_comment(v,tok)
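
The two extra arguments (a nil placeholder for options and a final true) are the "horrible workaround" from the commit title: ldoc's C front end now asks the C++ lexer to skip its string-matching rules altogether. A minimal sketch of the new call path, assuming Penlight's pl.utils for readfile and 'ldoc.lexer' as the module path of the bundled lexer (both are assumptions, not part of this diff):

-- Sketch only; c_tokens is a hypothetical helper mirroring CC.lexer above.
local utils = require 'pl.utils'
local lexer = require 'ldoc.lexer'

local function c_tokens(path)
   local src, err = utils.readfile(path)
   if not src then error(err) end
   -- {}  : empty filter, keep every token type
   -- nil : default options ({number=true,string=true})
   -- true: the new no_string flag, i.e. use the match table without string rules
   return lexer.cpp(src, {}, nil, true)
end
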
@@ -44,7 +44,7 @@ local STRING2 = [[^".-[^\\]"]]
 local STRING3 = "^((['\"])%2)" -- empty string
 local PREPRO = '^#.-[^\\]\n'

-local plain_matches,lua_matches,cpp_matches,lua_keyword,cpp_keyword
+local plain_matches,lua_matches,cpp_matches,cpp_matches_no_string,lua_keyword,cpp_keyword

 local function tdump(tok)
    return tok,tok
@@ -324,7 +324,7 @@ end
 -- @param filter a table of token types to exclude, by default {space=true,comments=true}
 -- @param options a table of options; by default, {number=true,string=true},
 -- which means convert numbers and strip string quotes.
-function lexer.cpp(s,filter,options)
+function lexer.cpp(s,filter,options,no_string)
    filter = filter or {comments=true}
    if not cpp_keyword then
       cpp_keyword = {
@@ -350,9 +350,9 @@ function lexer.cpp(s,filter,options)
       {IDEN,cpp_vdump},
       {NUMBER4,ndump},
       {NUMBER5,ndump},
-      -- {STRING3,sdump},
-      -- {STRING1,chdump},
-      -- {STRING2,sdump},
+      {STRING3,sdump},
+      {STRING1,chdump},
+      {STRING2,sdump},
       {'^//.-\n',cdump},
       {'^/%*.-%*/',cdump},
       {'^==',tdump},
@@ -375,7 +375,39 @@ function lexer.cpp(s,filter,options)
       {'^.',tdump}
       }
    end
-   return lexer.scan(s,cpp_matches,filter,options)
+   if not cpp_matches_no_string then
+      cpp_matches_no_string = {
+      {WSPACE,wsdump},
+      {PREPRO,pdump},
+      {NUMBER3,ndump},
+      {IDEN,cpp_vdump},
+      {NUMBER4,ndump},
+      {NUMBER5,ndump},
+      {'^//.-\n',cdump},
+      {'^/%*.-%*/',cdump},
+      {'^==',tdump},
+      {'^!=',tdump},
+      {'^<=',tdump},
+      {'^>=',tdump},
+      {'^->',tdump},
+      {'^&&',tdump},
+      {'^||',tdump},
+      {'^%+%+',tdump},
+      {'^%-%-',tdump},
+      {'^%+=',tdump},
+      {'^%-=',tdump},
+      {'^%*=',tdump},
+      {'^/=',tdump},
+      {'^|=',tdump},
+      {'^%^=',tdump},
+      {'^::',tdump},
+      {'^%.%.%.',tdump},
+      {'^.',tdump}
+      }
+   end
+   return lexer.scan(s,
+      not no_string and cpp_matches or cpp_matches_no_string,
+      filter,options)
 end

 --- get a list of parameters separated by a delimiter from a stream.
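
These hunks are in the lexer module (the functions and patterns suggest ldoc/lexer.lua): the string rules are restored in the regular cpp_matches table, and a second table, cpp_matches_no_string, leaves STRING1/STRING2/STRING3 out so quote characters fall through to the catch-all '^.' rule; the new no_string flag picks between the two. A rough sketch of what that changes for a caller, assuming the module path and the usual tok() iterator protocol of this lexer:

-- Sketch: compare the token streams produced with and without no_string.
local lexer = require 'ldoc.lexer'   -- assumed module path

local src = 'const char *msg = "hello, world";'

local function dump(tok)
   local t, v = tok()
   while t do
      io.write('[', t, '] ')
      t, v = tok()
   end
   io.write('\n')
end

dump(lexer.cpp(src, {}))            -- default table: the literal arrives as one string token
dump(lexer.cpp(src, {}, nil, true)) -- no_string: the quotes come through as plain one-character tokens
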
@@ -54,13 +54,14 @@ local function resolve_inline_references (ldoc, txt, item, plain)
    if backtick_references then
       res = res:gsub('`([^`]+)`',function(name)
          local ref,err = markup.process_reference(name)
+         local label = name
+         if name and do_escape then
+            label = name:gsub('_', '\\_')
+         end
          if ref then
-            if name and do_escape then
-               name = name:gsub('_', '\\_')
-            end
-            return ('<a href="%s">%s</a>'):format(ldoc.href(ref),name)
+            return ('<a href="%s">%s</a>'):format(ldoc.href(ref),label)
          else
-            return '<code>'..name..'</code>'
+            return '<code>'..label..'</code>'
          end
       end)
    end
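
This is the underscore escaping fix for backtick references (apparently in ldoc/markup.lua): the escaped text now lives in a separate label, so both branches, the resolved link and the plain <code> fallback, render the escaped form, while the lookup still sees the original name. Previously only the resolved branch escaped, so an unresolved reference such as `some_name` could have its underscores eaten by markdown.lua as emphasis. A standalone sketch of the same rule; escape_backtick_ref, resolve and href are stand-ins, not ldoc's API:

-- Sketch of the escaping rule introduced above.
local function escape_backtick_ref(name, do_escape, resolve, href)
   local label = name
   if name and do_escape then
      label = name:gsub('_', '\\_')    -- keep markdown.lua from treating _ as emphasis
   end
   local ref = resolve(name)           -- lookup always uses the unescaped name
   if ref then
      return ('<a href="%s">%s</a>'):format(href(ref), label)
   else
      return '<code>'..label..'</code>'
   end
end

-- With a resolver that finds the name:
--   escape_backtick_ref('my_mod.fun_name', true,
--      function() return 'modules/my_mod.html#fun_name' end,
--      function(r) return r end)
--   --> '<a href="modules/my_mod.html#fun_name">my\_mod.fun\_name</a>'
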
@@ -117,7 +118,7 @@ local global_context, local_context
 -- - prettify any code blocks

 local function process_multiline_markdown(ldoc, txt, F, filename, deflang)
-   local res, L, append = {}, 0, table.insert
+   local res, L, append = {}, 0, table.insert
    local err_item = {
       warning = function (self,msg)
          io.stderr:write(filename..':'..L..': '..msg,'\n')
@@ -133,7 +134,7 @@ local function process_multiline_markdown(ldoc, txt, F, filename, deflang)
       if code ~= '' then
          local err
          -- If we omit the following '\n', a '--' (or '//') comment on the
-         -- last line won't be recognized.
+         -- last line won't be recognized.
          code, err = prettify.code(lang,filename,code..'\n',L,false)
          code = resolve_inline_references(ldoc, code, err_item,true)
          append(res,'<pre>')
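
The comment in this hunk records a lexer quirk worth keeping in mind: the comment-matching patterns expect a terminating newline, so a '--' (or '//') comment on the last line of a code block is only recognized if the block ends with one, which is why the code appends '\n' before prettifying. A hedged sketch of that guard; prettify_block is a hypothetical wrapper, and only prettify.code's argument order is taken from the hunk:

-- Hypothetical wrapper showing the trailing-newline guard.
local function prettify_block(prettify, lang, filename, code, first_line)
   -- Without a final '\n', a comment on the last line would not be tokenized.
   if not code:find('\n$') then
      code = code .. '\n'
   end
   return prettify.code(lang, filename, code, first_line, false)
end
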
@@ -308,7 +309,7 @@ local function get_processor(ldoc, format)
       -- AFAIK only markdown.lua has underscore-in-identifier problem...
       if ldoc.dont_escape_underscore ~= nil then
          ldoc.dont_escape_underscore = actual_format ~= 'markdown'
       end
    end
    return markdown_processor(ldoc, formatter)
 end
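
Finally, get_processor ties underscore escaping to the chosen processor: per the comment, only the pure-Lua markdown.lua mishandles underscores inside identifiers. Since dont_escape_underscore is a double negative, a small sketch of what the lines above do to the field may help; choose_escape_default is a hypothetical helper that simply mirrors them:

-- Hypothetical helper mirroring the defaulting shown in the hunk above.
local function choose_escape_default(ldoc, actual_format)
   -- If the option was set at all, force it to match the processor.
   if ldoc.dont_escape_underscore ~= nil then
      ldoc.dont_escape_underscore = actual_format ~= 'markdown'
   end
   return ldoc.dont_escape_underscore
end

-- choose_escape_default({dont_escape_underscore = false}, 'markdown')  --> false (underscores get escaped)
-- choose_escape_default({dont_escape_underscore = false}, 'discount')  --> true  (underscores left alone)
-- choose_escape_default({}, 'markdown')                                --> nil   (option never set, left as-is)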