version bump; underscore escaping fix for backticks; horrible workaround for deficiencies of C lexer

This commit is contained in:
steve donovan 2014-10-25 12:01:48 +02:00
parent 2fbe566039
commit c516eebd12
4 changed files with 49 additions and 16 deletions

View File

@@ -37,7 +37,7 @@ app.require_here()
--- @usage
local usage = [[
ldoc, a documentation generator for Lua, vs 1.4.2
ldoc, a documentation generator for Lua, vs 1.4.3
-d,--dir (default doc) output directory
-o,--output (default 'index') output name
-v,--verbose verbose

View File

@@ -259,7 +259,7 @@ function CC.lexer(f)
local err
f,err = utils.readfile(f)
if not f then quit(err) end
return lexer.cpp(f,{})
return lexer.cpp(f,{},nil,true)
end
function CC:grab_block_comment(v,tok)

View File

@@ -44,7 +44,7 @@ local STRING2 = [[^".-[^\\]"]]
local STRING3 = "^((['\"])%2)" -- empty string
local PREPRO = '^#.-[^\\]\n'
local plain_matches,lua_matches,cpp_matches,lua_keyword,cpp_keyword
local plain_matches,lua_matches,cpp_matches,cpp_matches_no_string,lua_keyword,cpp_keyword
local function tdump(tok)
return tok,tok
@@ -324,7 +324,7 @@ end
-- @param filter a table of token types to exclude, by default {space=true,comments=true}
-- @param options a table of options; by default, {number=true,string=true},
-- which means convert numbers and strip string quotes.
function lexer.cpp(s,filter,options)
function lexer.cpp(s,filter,options,no_string)
filter = filter or {comments=true}
if not cpp_keyword then
cpp_keyword = {
@@ -350,9 +350,9 @@ function lexer.cpp(s,filter,options)
{IDEN,cpp_vdump},
{NUMBER4,ndump},
{NUMBER5,ndump},
-- {STRING3,sdump},
-- {STRING1,chdump},
-- {STRING2,sdump},
{STRING3,sdump},
{STRING1,chdump},
{STRING2,sdump},
{'^//.-\n',cdump},
{'^/%*.-%*/',cdump},
{'^==',tdump},
@@ -375,7 +375,39 @@ function lexer.cpp(s,filter,options)
{'^.',tdump}
}
end
return lexer.scan(s,cpp_matches,filter,options)
if not cpp_matches_no_string then
cpp_matches_no_string = {
{WSPACE,wsdump},
{PREPRO,pdump},
{NUMBER3,ndump},
{IDEN,cpp_vdump},
{NUMBER4,ndump},
{NUMBER5,ndump},
{'^//.-\n',cdump},
{'^/%*.-%*/',cdump},
{'^==',tdump},
{'^!=',tdump},
{'^<=',tdump},
{'^>=',tdump},
{'^->',tdump},
{'^&&',tdump},
{'^||',tdump},
{'^%+%+',tdump},
{'^%-%-',tdump},
{'^%+=',tdump},
{'^%-=',tdump},
{'^%*=',tdump},
{'^/=',tdump},
{'^|=',tdump},
{'^%^=',tdump},
{'^::',tdump},
{'^%.%.%.',tdump},
{'^.',tdump}
}
end
return lexer.scan(s,
not no_string and cpp_matches or cpp_matches_no_string,
filter,options)
end
--- get a list of parameters separated by a delimiter from a stream.

View File

@@ -54,13 +54,14 @@ local function resolve_inline_references (ldoc, txt, item, plain)
if backtick_references then
res = res:gsub('`([^`]+)`',function(name)
local ref,err = markup.process_reference(name)
if ref then
local label = name
if name and do_escape then
name = name:gsub('_', '\\_')
label = name:gsub('_', '\\_')
end
return ('<a href="%s">%s</a>'):format(ldoc.href(ref),name)
if ref then
return ('<a href="%s">%s</a>'):format(ldoc.href(ref),label)
else
return '<code>'..name..'</code>'
return '<code>'..label..'</code>'
end
end)
end