bug 434217: [cleancode] format all lua files and use unix line delimiter
diff --git a/libraries/doctemplates/template/file.lua b/libraries/doctemplates/template/file.lua
index 0d5831f..ef9e1f9 100644
--- a/libraries/doctemplates/template/file.lua
+++ b/libraries/doctemplates/template/file.lua
@@ -1,106 +1,106 @@
---------------------------------------------------------------------------------
--- Copyright (c) 2012 Sierra Wireless.
--- All rights reserved. This program and the accompanying materials
--- are made available under the terms of the Eclipse Public License v1.0
--- which accompanies this distribution, and is available at
--- http://www.eclipse.org/legal/epl-v10.html
---
--- Contributors:
--- Kevin KIN-FOO <kkinfoo@sierrawireless.com>
--- - initial API and implementation and initial documentation
---------------------------------------------------------------------------------
-return[[#
-<div id="content">
-# --
-# -- Module name
-# --
-# if _file.name then
- <h$(i)>Module <code>$(_file.name)</code></h$(i)>
-# end
-# --
-# -- Descriptions
-# --
-# if _file.shortdescription then
- $( format(_file.shortdescription) )
-# end
-# if _file.description and #_file.description > 0 then
- $( format(_file.description) )
-# end
-# --
-# -- Handle "@usage" special tag
-# --
-#if _file.metadata and _file.metadata.usage then
- $( applytemplate(_file.metadata.usage, i+1) )
-#end
-# --
-# -- Show quick description of current type
-# --
-#
-# -- show quick description for globals
-# if not isempty(_file.globalvars) then
- <h$(i+1)>Global(s)</h$(i+1)>
- <table class="function_list">
-# for _, item in sortedpairs(_file.globalvars) do
- <tr>
- <td class="name" nowrap="nowrap">$( fulllinkto(item) )</td>
- <td class="summary">$( format(item.shortdescription) )</td>
- </tr>
-# end
- </table>
-# end
-#
-# -- get type corresponding to this file (module)
-# local currenttype
-# local typeref = _file:moduletyperef()
-# if typeref and typeref.tag == "internaltyperef" then
-# local typedef = _file.types[typeref.typename]
-# if typedef and typedef.tag == "recordtypedef" then
-# currenttype = typedef
-# end
-# end
-#
-# -- show quick description type exposed by module
+--------------------------------------------------------------------------------
+-- Copyright (c) 2012 Sierra Wireless.
+-- All rights reserved. This program and the accompanying materials
+-- are made available under the terms of the Eclipse Public License v1.0
+-- which accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- Contributors:
+-- Kevin KIN-FOO <kkinfoo@sierrawireless.com>
+-- - initial API and implementation and initial documentation
+--------------------------------------------------------------------------------
+return[[#
+<div id="content">
+# --
+# -- Module name
+# --
+# if _file.name then
+ <h$(i)>Module <code>$(_file.name)</code></h$(i)>
+# end
+# --
+# -- Descriptions
+# --
+# if _file.shortdescription then
+ $( format(_file.shortdescription) )
+# end
+# if _file.description and #_file.description > 0 then
+ $( format(_file.description) )
+# end
+# --
+# -- Handle "@usage" special tag
+# --
+#if _file.metadata and _file.metadata.usage then
+ $( applytemplate(_file.metadata.usage, i+1) )
+#end
+# --
+# -- Show quick description of current type
+# --
+#
+# -- show quick description for globals
+# if not isempty(_file.globalvars) then
+ <h$(i+1)>Global(s)</h$(i+1)>
+ <table class="function_list">
+# for _, item in sortedpairs(_file.globalvars) do
+ <tr>
+ <td class="name" nowrap="nowrap">$( fulllinkto(item) )</td>
+ <td class="summary">$( format(item.shortdescription) )</td>
+ </tr>
+# end
+ </table>
+# end
+#
+# -- get type corresponding to this file (module)
+# local currenttype
+# local typeref = _file:moduletyperef()
+# if typeref and typeref.tag == "internaltyperef" then
+# local typedef = _file.types[typeref.typename]
+# if typedef and typedef.tag == "recordtypedef" then
+# currenttype = typedef
+# end
+# end
+#
+# -- show quick description type exposed by module
# if currenttype and (not isempty(currenttype.fields) or currenttype:getcalldef()) then
- <h$(i+1)><a id="$(anchor(currenttype))" >Type <code>$(currenttype.name)</code></a></h$(i+1)>
- $( applytemplate(currenttype, i+2, 'index') )
-# end
-# --
-# -- Show quick description of other types
-# --
-# if _file.types then
-# for name, type in sortedpairs( _file.types ) do
-# if type ~= currenttype and type.tag == 'recordtypedef' and (not isempty(type.fields) or type:getcalldef()) then
- <h$(i+1)><a id="$(anchor(type))">Type <code>$(name)</code></a></h$(i+1)>
- $( applytemplate(type, i+2, 'index') )
-# end
-# end
-# end
-# --
-# -- Long description of globals
-# --
-# if not isempty(_file.globalvars) then
- <h$(i+1)>Global(s)</h$(i+1)>
-# for name, item in sortedpairs(_file.globalvars) do
- $( applytemplate(item, i+2) )
-# end
-# end
-# --
-# -- Long description of current type
-# --
-# if currenttype then
- <h$(i+1)><a id="$(anchor(currenttype))" >Type <code>$(currenttype.name)</code></a></h$(i+1)>
- $( applytemplate(currenttype, i+2) )
-# end
-# --
-# -- Long description of other types
-# --
-# if not isempty( _file.types ) then
-# for name, type in sortedpairs( _file.types ) do
-# if type ~= currenttype and type.tag == 'recordtypedef' then
- <h$(i+1)><a id="$(anchor(type))" >Type <code>$(name)</code></a></h$(i+1)>
- $( applytemplate(type, i+2) )
-# end
-# end
-# end
-</div>
+ <h$(i+1)><a id="$(anchor(currenttype))" >Type <code>$(currenttype.name)</code></a></h$(i+1)>
+ $( applytemplate(currenttype, i+2, 'index') )
+# end
+# --
+# -- Show quick description of other types
+# --
+# if _file.types then
+# for name, type in sortedpairs( _file.types ) do
+# if type ~= currenttype and type.tag == 'recordtypedef' and (not isempty(type.fields) or type:getcalldef()) then
+ <h$(i+1)><a id="$(anchor(type))">Type <code>$(name)</code></a></h$(i+1)>
+ $( applytemplate(type, i+2, 'index') )
+# end
+# end
+# end
+# --
+# -- Long description of globals
+# --
+# if not isempty(_file.globalvars) then
+ <h$(i+1)>Global(s)</h$(i+1)>
+# for name, item in sortedpairs(_file.globalvars) do
+ $( applytemplate(item, i+2) )
+# end
+# end
+# --
+# -- Long description of current type
+# --
+# if currenttype then
+ <h$(i+1)><a id="$(anchor(currenttype))" >Type <code>$(currenttype.name)</code></a></h$(i+1)>
+ $( applytemplate(currenttype, i+2) )
+# end
+# --
+# -- Long description of other types
+# --
+# if not isempty( _file.types ) then
+# for name, type in sortedpairs( _file.types ) do
+# if type ~= currenttype and type.tag == 'recordtypedef' then
+ <h$(i+1)><a id="$(anchor(type))" >Type <code>$(name)</code></a></h$(i+1)>
+ $( applytemplate(type, i+2) )
+# end
+# end
+# end
+</div>
]]
diff --git a/libraries/doctemplates/template/index/recordtypedef.lua b/libraries/doctemplates/template/index/recordtypedef.lua
index e9ead7e..2d4ab13 100644
--- a/libraries/doctemplates/template/index/recordtypedef.lua
+++ b/libraries/doctemplates/template/index/recordtypedef.lua
@@ -13,19 +13,19 @@
# local calldef = _recordtypedef:getcalldef()
# local hasfield = not isempty(_recordtypedef.fields)
# if calldef or hasfield then
- <table class="function_list">
+ <table class="function_list">
# if calldef then
<tr>
<td class="name" nowrap="nowrap">$( fulllinkto(calldef,_recordtypedef) )</td>
<td class="summary">$( format(calldef.shortdescription) )</td>
</tr>
# end
-# for _, item in sortedpairs( _recordtypedef.fields ) do
- <tr>
- <td class="name" nowrap="nowrap">$( fulllinkto(item) )</td>
- <td class="summary">$( format(item.shortdescription) )</td>
- </tr>
-# end
- </table>
+# for _, item in sortedpairs( _recordtypedef.fields ) do
+ <tr>
+ <td class="name" nowrap="nowrap">$( fulllinkto(item) )</td>
+ <td class="summary">$( format(item.shortdescription) )</td>
+ </tr>
+# end
+ </table>
# end
# ]]
diff --git a/libraries/doctemplates/template/item.lua b/libraries/doctemplates/template/item.lua
index bedacd6..2faa026 100644
--- a/libraries/doctemplates/template/item.lua
+++ b/libraries/doctemplates/template/item.lua
@@ -21,13 +21,13 @@
# -- Show item type for internal type
# --
#if _item.type and (not typedef or typedef.tag ~= 'functiontypedef') then
-# --Show link only when available
-# local link = fulllinkto(_item.type)
-# if link then
- <em>$( link )</em>
-# else
- <em>$(prettyname(_item.type))</em>
-# end
+# --Show link only when available
+# local link = fulllinkto(_item.type)
+# if link then
+ <em>$( link )</em>
+# else
+ <em>$(prettyname(_item.type))</em>
+# end
#end
<a id="$(anchor(_item))" >
<strong>$( prettyname(_item) )</strong>
@@ -36,11 +36,11 @@
<dd>
# local ignoredescription = false
# if _item.shortdescription then
- $( format(_item.shortdescription) )
+ $( format(_item.shortdescription) )
# ignoredescription = true
# end
# if _item.description and #_item.description > 0 then
- $( format(_item.description) )
+ $( format(_item.description) )
# ignoredescription = true
# end
#
@@ -48,7 +48,7 @@
# -- For function definitions, describe parameters and return values
# --
#if typedef and typedef.tag == 'functiontypedef' then
-# local fdef = typedef
+# local fdef = typedef
$( applytemplate(fdef, i,nil,isinvokable(_item),ignoredescription) )
#else
#--
diff --git a/libraries/doctemplates/template/recordtypedef.lua b/libraries/doctemplates/template/recordtypedef.lua
index 3147794..e5418c9 100644
--- a/libraries/doctemplates/template/recordtypedef.lua
+++ b/libraries/doctemplates/template/recordtypedef.lua
@@ -20,10 +20,10 @@
# -- Descriptions
# --
#if _recordtypedef.shortdescription and #_recordtypedef.shortdescription > 0 then
- $( format( _recordtypedef.shortdescription ) )
+ $( format( _recordtypedef.shortdescription ) )
#end
#if _recordtypedef.description and #_recordtypedef.description > 0 then
- $( format( _recordtypedef.description ) )
+ $( format( _recordtypedef.description ) )
#end
# --
# -- Structure
@@ -50,7 +50,7 @@
#-- Describe usage
#--
#if _recordtypedef.metadata and _recordtypedef.metadata.usage then
- $( applytemplate(_recordtypedef.metadata.usage, i) )
+ $( applytemplate(_recordtypedef.metadata.usage, i) )
#end
# --
# -- Describe type fields
@@ -58,7 +58,7 @@
#local calldef = _recordtypedef:getcalldef()
#local hasfield = not isempty(_recordtypedef.fields)
#if calldef or hasfield then
- <h$(i)>Field(s)</h$(i)>
+ <h$(i)>Field(s)</h$(i)>
# if calldef then
<dl class="function">
<dt>
@@ -66,12 +66,12 @@
<strong>$( prettyname(calldef,_recordtypedef) )</strong>
</a>
</dt>
-<dd>
+<dd>
$( applytemplate(calldef, i, nil, true) )
</dd>
</dl>
# end
-# for name, item in sortedpairs( _recordtypedef.fields ) do
- $( applytemplate(item, i) )
-# end
+# for name, item in sortedpairs( _recordtypedef.fields ) do
+ $( applytemplate(item, i) )
+# end
#end ]]
diff --git a/libraries/doctemplates/template/usage.lua b/libraries/doctemplates/template/usage.lua
index bb74dd0..ebf9ea0 100644
--- a/libraries/doctemplates/template/usage.lua
+++ b/libraries/doctemplates/template/usage.lua
@@ -14,20 +14,20 @@
#-- Show usage samples
#--
#if _usage then
-# if #_usage > 1 then
-# -- Show all usages
- <h$(i)>Usages:</h$(i)>
- <ul>
-# -- Loop over several usage description
-# for _, usage in ipairs(_usage) do
- <li><pre class="example"><code>$( securechevrons(usage.description) )</code></pre></li>
-# end
- </ul>
-# elseif #_usage == 1 then
-# -- Show unique usage sample
- <h$(i)>Usage:</h$(i)>
-# local usage = _usage[1]
- <pre class="example"><code>$( securechevrons(usage.description) )</code></pre>
-# end
+# if #_usage > 1 then
+# -- Show all usages
+ <h$(i)>Usages:</h$(i)>
+ <ul>
+# -- Loop over several usage description
+# for _, usage in ipairs(_usage) do
+ <li><pre class="example"><code>$( securechevrons(usage.description) )</code></pre></li>
+# end
+ </ul>
+# elseif #_usage == 1 then
+# -- Show unique usage sample
+ <h$(i)>Usage:</h$(i)>
+# local usage = _usage[1]
+ <pre class="example"><code>$( securechevrons(usage.description) )</code></pre>
+# end
#end
#]]
diff --git a/libraries/doctemplates/template/utils.lua b/libraries/doctemplates/template/utils.lua
index 98bbdfe..17fae44 100644
--- a/libraries/doctemplates/template/utils.lua
+++ b/libraries/doctemplates/template/utils.lua
@@ -16,14 +16,14 @@
-- Handles link generation, node quick description.
--
-- Provides:
--- * link generation
--- * anchor generation
--- * node quick description
+-- * link generation
+-- * anchor generation
+-- * node quick description
local M = {}
function M.isempty(map)
- local f = pairs(map)
- return f(map) == nil
+ local f = pairs(map)
+ return f(map) == nil
end
---
@@ -31,26 +31,26 @@
-- recordtypedef => #(typename)
-- item (field of recordtypedef) => #(typename).itemname
-- item (global) => itemname
---functiontypedef callof given type => ##(giventypename)__call
-
+--functiontypedef callof given type => ##(giventypename)__call
+
M.anchortypes = {
- recordtypedef = function (o) return string.format('#(%s)', o.name) end,
- functiontypedef = function (o,t) return string.format('#(%s)__call', t.name) end,
- item = function(o)
- if not o.parent or o.parent.tag == 'file' then
- -- Handle items referencing globals
- return o.name
- elseif o.parent and o.parent.tag == 'recordtypedef' then
- -- Handle items included in recordtypedef
- local recordtypedef = o.parent
- local recordtypedefanchor = M.anchor(recordtypedef)
- if not recordtypedefanchor then
- return nil, 'Unable to generate anchor for `recordtypedef parent.'
- end
- return string.format('%s.%s', recordtypedefanchor, o.name)
- end
- return nil, 'Unable to generate anchor for `item'
- end
+ recordtypedef = function (o) return string.format('#(%s)', o.name) end,
+ functiontypedef = function (o,t) return string.format('#(%s)__call', t.name) end,
+ item = function(o)
+ if not o.parent or o.parent.tag == 'file' then
+ -- Handle items referencing globals
+ return o.name
+ elseif o.parent and o.parent.tag == 'recordtypedef' then
+ -- Handle items included in recordtypedef
+ local recordtypedef = o.parent
+ local recordtypedefanchor = M.anchor(recordtypedef)
+ if not recordtypedefanchor then
+ return nil, 'Unable to generate anchor for `recordtypedef parent.'
+ end
+ return string.format('%s.%s', recordtypedefanchor, o.name)
+ end
+ return nil, 'Unable to generate anchor for `item'
+ end
}
---
@@ -63,29 +63,29 @@
-- # local anchorname = anchor(someobject)
-- <a id="$(anchorname)" />
function M.anchor( modelobject,... )
- local tag = modelobject.tag
- if M.anchortypes[ tag ] then
- return M.anchortypes[ tag ](modelobject,...)
- end
- return nil, string.format('No anchor available for `%s', tag)
+ local tag = modelobject.tag
+ if M.anchortypes[ tag ] then
+ return M.anchortypes[ tag ](modelobject,...)
+ end
+ return nil, string.format('No anchor available for `%s', tag)
end
local function getexternalmodule( item )
- -- Get file which contains this item
- local file
- if item.parent then
- if item.parent.tag =='recordtypedef' then
- local recordtypedefparent = item.parent.parent
- if recordtypedefparent and recordtypedefparent.tag =='file'then
- file = recordtypedefparent
- end
- elseif item.parent.tag =='file' then
- file = item.parent
- else
- return nil, 'Unable to fetch item parent'
- end
- end
- return file
+ -- Get file which contains this item
+ local file
+ if item.parent then
+ if item.parent.tag =='recordtypedef' then
+ local recordtypedefparent = item.parent.parent
+ if recordtypedefparent and recordtypedefparent.tag =='file'then
+ file = recordtypedefparent
+ end
+ elseif item.parent.tag =='file' then
+ file = item.parent
+ else
+ return nil, 'Unable to fetch item parent'
+ end
+ end
+ return file
end
---
@@ -97,7 +97,7 @@
-- => linkto(file)#anchor(recordtyperef)
-- file(module) => modulename.html
-- index => index.html
--- functiontypedef callof given type => ##(giventypename)__call
+-- functiontypedef callof given type => ##(giventypename)__call
-- recordtypedef => ##(typename)
-- => #anchor(recordtyperef)
-- item (internal field of recordtypedef) => ##(typename).itemname
@@ -109,57 +109,57 @@
-- item (externalglobal) => modulename.html#itemname
-- => linkto(file)#anchor(item)
M.linktypes = {
- internaltyperef = function(o) return string.format('##(%s)', o.typename) end,
- externaltyperef = function(o) return string.format('%s.html##(%s)', o.modulename, o.typename) end,
- file = function(o) return string.format('%s.html', o.name) end,
- index = function() return 'index.html' end,
- recordtypedef = function(o)
- local anchor = M.anchor(o)
- if not anchor then
- return nil, 'Unable to generate anchor for `recordtypedef.'
- end
- return string.format('#%s', anchor)
- end,
- functiontypedef = function(o,...)
+ internaltyperef = function(o) return string.format('##(%s)', o.typename) end,
+ externaltyperef = function(o) return string.format('%s.html##(%s)', o.modulename, o.typename) end,
+ file = function(o) return string.format('%s.html', o.name) end,
+ index = function() return 'index.html' end,
+ recordtypedef = function(o)
+ local anchor = M.anchor(o)
+ if not anchor then
+ return nil, 'Unable to generate anchor for `recordtypedef.'
+ end
+ return string.format('#%s', anchor)
+ end,
+ functiontypedef = function(o,...)
local anchor = M.anchor(o,...)
if not anchor then
return nil, 'Unable to generate anchor for `functiontypedef.'
end
return string.format('#%s', anchor)
end,
- item = function(o)
+ item = function(o)
- -- For every item get anchor
- local anchor = M.anchor(o)
- if not anchor then
- return nil, 'Unable to generate anchor for `item.'
- end
+ -- For every item get anchor
+ local anchor = M.anchor(o)
+ if not anchor then
+ return nil, 'Unable to generate anchor for `item.'
+ end
- -- Built local link to item
- local linktoitem = string.format('#%s', anchor)
+ -- Built local link to item
+ local linktoitem = string.format('#%s', anchor)
- --
- -- For external item, prefix with the link to the module.
- --
- -- The "external item" concept is used only here for short/embedded
- -- notation purposed. This concept and the `.external` field SHALL NOT
- -- be used elsewhere.
- --
- if o.external then
+ --
+ -- For external item, prefix with the link to the module.
+ --
+ -- The "external item" concept is used only here for short/embedded
+ -- notation purposed. This concept and the `.external` field SHALL NOT
+ -- be used elsewhere.
+ --
+ if o.external then
- -- Get link to file which contains this item
- local file = getexternalmodule( o )
- local linktofile = file and M.linkto( file )
- if not linktofile then
- return nil, 'Unable to generate link for external `item.'
- end
+ -- Get link to file which contains this item
+ local file = getexternalmodule( o )
+ local linktofile = file and M.linkto( file )
+ if not linktofile then
+ return nil, 'Unable to generate link for external `item.'
+ end
- -- Built external link to item
- linktoitem = string.format("%s%s", linktofile, linktoitem)
- end
+ -- Built external link to item
+ linktoitem = string.format("%s%s", linktofile, linktoitem)
+ end
- return linktoitem
- end
+ return linktoitem
+ end
}
---
@@ -171,14 +171,14 @@
-- @usage # -- In a template
-- <a href="$( linkto(api) )">Some text</a>
function M.linkto( apiobject,...)
- local tag = apiobject.tag
- if M.linktypes[ tag ] then
- return M.linktypes[tag](apiobject,...)
- end
- if not tag then
- return nil, 'Link generation is impossible as no tag has been provided.'
- end
- return nil, string.format('No link generation available for `%s.', tag)
+ local tag = apiobject.tag
+ if M.linktypes[ tag ] then
+ return M.linktypes[tag](apiobject,...)
+ end
+ if not tag then
+ return nil, 'Link generation is impossible as no tag has been provided.'
+ end
+ return nil, string.format('No link generation available for `%s.', tag)
end
---
@@ -189,7 +189,7 @@
-- externaltyperef => modulename#typename
-- file(module) => modulename
-- index => index
--- functiontypedef callof given type => giventypename(param1,param2, ...)
+-- functiontypedef callof given type => giventypename(param1,param2, ...)
-- recordtypedef => typename
-- item (internal function of recordtypedef) => typename.itemname(param1, param2,...)
-- item (internal func with self of recordtypedef) => typename:itemname(param2)
@@ -202,135 +202,135 @@
-- item (external func global) => functionname(param1, param2,...)
-- item (external non func global) => itemname
M.prettynametypes = {
- primitivetyperef = function(o) return string.format('#%s', o.typename) end,
- externaltyperef = function(o) return string.format('%s#%s', o.modulename, o.typename) end,
- inlinetyperef = function(o)
- if not(o.def and o.def.tag == "recordtypedef" and o.def.name) then
- return nil
- end
- if o.def.name == "list" then
- local valuetypename = M.prettyname(o.def.defaultvaluetyperef)
- return valuetypename and string.format('#list<%s>', valuetypename) or nil
- elseif o.def.name == "map" then
- local keytypename = M.prettyname(o.def.defaultkeytyperef)
- local valuetypename = M.prettyname(o.def.defaultvaluetyperef)
+ primitivetyperef = function(o) return string.format('#%s', o.typename) end,
+ externaltyperef = function(o) return string.format('%s#%s', o.modulename, o.typename) end,
+ inlinetyperef = function(o)
+ if not(o.def and o.def.tag == "recordtypedef" and o.def.name) then
+ return nil
+ end
+ if o.def.name == "list" then
+ local valuetypename = M.prettyname(o.def.defaultvaluetyperef)
+ return valuetypename and string.format('#list<%s>', valuetypename) or nil
+ elseif o.def.name == "map" then
+ local keytypename = M.prettyname(o.def.defaultkeytyperef)
+ local valuetypename = M.prettyname(o.def.defaultvaluetyperef)
return keytypename and valuetypename and string.format('#map<%s,%s>', keytypename, valuetypename) or nil
- else
+ else
return string.format('#%s',o.def.name)
- end
- end,
- index = function(o) return "index" end,
- file = function(o) return o.name end,
- recordtypedef = function(o) return o.name end,
- functiontypedef = function(o,t)
- if t and t.tag == 'recordtypedef' and t.name then
- local paramlist = {}
- for position, param in ipairs(o.params) do
+ end
+ end,
+ index = function(o) return "index" end,
+ file = function(o) return o.name end,
+ recordtypedef = function(o) return o.name end,
+ functiontypedef = function(o,t)
+ if t and t.tag == 'recordtypedef' and t.name then
+ local paramlist = {}
+ for position, param in ipairs(o.params) do
-- we ignore the first param
if not (position == 1) then
table.insert(paramlist, param.name)
end
end
- return string.format('%s(%s)',t.name, table.concat(paramlist, ", "))
- end
- end,
- item = function( o )
+ return string.format('%s(%s)',t.name, table.concat(paramlist, ", "))
+ end
+ end,
+ item = function( o )
- -- Determine item name
- -- ----------------------
- local itemname = o.name
-
- -- Determine scope
- -- ----------------------
- local parent = o.parent
- local isglobal = parent and parent.tag == 'file'
- local isfield = parent and parent.tag == 'recordtypedef'
+ -- Determine item name
+ -- ----------------------
+ local itemname = o.name
- -- Determine type name
- -- ----------------------
-
- local typename = isfield and parent.name
+ -- Determine scope
+ -- ----------------------
+ local parent = o.parent
+ local isglobal = parent and parent.tag == 'file'
+ local isfield = parent and parent.tag == 'recordtypedef'
- -- Fetch item definition
- -- ----------------------
- -- Get file object
- local file
- if isglobal then
- file = parent
- elseif isfield then
- file = parent.parent
- end
- -- Get definition
- local definition = o:resolvetype (file)
-
-
-
- -- Build prettyname
- -- ----------------------
- local prettyname
- if not definition or definition.tag ~= 'functiontypedef' then
- -- Fields
- if isglobal or not typename then
- prettyname = itemname
- else
- prettyname = string.format('%s.%s', typename, itemname)
- end
- else
- -- Functions
- -- Build parameter list
- local paramlist = {}
- local isinvokable = M.isinvokable(o)
- for position, param in ipairs(definition.params) do
- -- For non global function, when first parameter is 'self',
- -- it will not be part of listed parameters
- if not (position == 1 and isinvokable and isfield) then
- table.insert(paramlist, param.name)
- if position ~= #definition.params then
- table.insert(paramlist, ', ')
- end
- end
- end
+ -- Determine type name
+ -- ----------------------
- if isglobal or not typename then
- prettyname = string.format('%s(%s)',itemname, table.concat(paramlist))
- else
- -- Determine function prefix operator,
- -- ':' if 'self' is first parameter, '.' else way
- local operator = isinvokable and ':' or '.'
-
- -- Append function parameters
- prettyname = string.format('%s%s%s(%s)',typename, operator, itemname, table.concat(paramlist))
- end
- end
-
- -- Manage external Item prettyname
- -- ----------------------
- local externalmodule = o.external and getexternalmodule( o )
- local externalmodulename = externalmodule and externalmodule.name
-
- if externalmodulename then
- return string.format('%s#%s',externalmodulename,prettyname)
- else
- return prettyname
- end
- end
+ local typename = isfield and parent.name
+
+ -- Fetch item definition
+ -- ----------------------
+ -- Get file object
+ local file
+ if isglobal then
+ file = parent
+ elseif isfield then
+ file = parent.parent
+ end
+ -- Get definition
+ local definition = o:resolvetype (file)
+
+
+
+ -- Build prettyname
+ -- ----------------------
+ local prettyname
+ if not definition or definition.tag ~= 'functiontypedef' then
+ -- Fields
+ if isglobal or not typename then
+ prettyname = itemname
+ else
+ prettyname = string.format('%s.%s', typename, itemname)
+ end
+ else
+ -- Functions
+ -- Build parameter list
+ local paramlist = {}
+ local isinvokable = M.isinvokable(o)
+ for position, param in ipairs(definition.params) do
+ -- For non global function, when first parameter is 'self',
+ -- it will not be part of listed parameters
+ if not (position == 1 and isinvokable and isfield) then
+ table.insert(paramlist, param.name)
+ if position ~= #definition.params then
+ table.insert(paramlist, ', ')
+ end
+ end
+ end
+
+ if isglobal or not typename then
+ prettyname = string.format('%s(%s)',itemname, table.concat(paramlist))
+ else
+ -- Determine function prefix operator,
+ -- ':' if 'self' is first parameter, '.' else way
+ local operator = isinvokable and ':' or '.'
+
+ -- Append function parameters
+ prettyname = string.format('%s%s%s(%s)',typename, operator, itemname, table.concat(paramlist))
+ end
+ end
+
+ -- Manage external Item prettyname
+ -- ----------------------
+ local externalmodule = o.external and getexternalmodule( o )
+ local externalmodulename = externalmodule and externalmodule.name
+
+ if externalmodulename then
+ return string.format('%s#%s',externalmodulename,prettyname)
+ else
+ return prettyname
+ end
+ end
}
M.prettynametypes.internaltyperef = M.prettynametypes.primitivetyperef
----
--- Check if the given item is a function that can be invoked
+---
+-- Check if the given item is a function that can be invoked
function M.isinvokable(item)
- --test if the item is global
- if item.parent and item.parent.tag == 'file' then
- return false
- end
- -- check first param
- local definition = item:resolvetype()
- if definition and definition.tag == 'functiontypedef' then
- if (#definition.params > 0) then
- return definition.params[1].name == 'self'
- end
- end
+ --test if the item is global
+ if item.parent and item.parent.tag == 'file' then
+ return false
+ end
+ -- check first param
+ local definition = item:resolvetype()
+ if definition and definition.tag == 'functiontypedef' then
+ if (#definition.params > 0) then
+ return definition.params[1].name == 'self'
+ end
+ end
end
---
@@ -338,19 +338,19 @@
--
-- Resolve all element needed to summurize nicely an element form API model.
-- @usage $ print( prettyname(item) )
--- module:somefunction(secondparameter)
+-- module:somefunction(secondparameter)
-- @function [parent = #docutils]
-- @param apiobject Object form API model
-- @result #string Human readable description of given element.
-- @result #nil, #string In case of error.
function M.prettyname( apiobject, ... )
- local tag = apiobject.tag
- if M.prettynametypes[tag] then
- return M.prettynametypes[tag](apiobject,...)
- elseif not tag then
- return nil, 'No pretty name available as no tag has been provided.'
- end
- return nil, string.format('No pretty name for `%s.', tag)
+ local tag = apiobject.tag
+ if M.prettynametypes[tag] then
+ return M.prettynametypes[tag](apiobject,...)
+ elseif not tag then
+ return nil, 'No pretty name available as no tag has been provided.'
+ end
+ return nil, string.format('No pretty name for `%s.', tag)
end
---
@@ -360,8 +360,8 @@
-- @usage securechevrons('<markup>') => '&lt;markup&gt;'
-- @return #string Converted string.
function M.securechevrons( str )
- if not str then return nil, 'String expected.' end
- return string.gsub(str:gsub('<', '&lt;'), '>', '&gt;')
+ if not str then return nil, 'String expected.' end
+ return string.gsub(str:gsub('<', '&lt;'), '>', '&gt;')
end
-------------------------------------------------------------------------------
@@ -376,95 +376,95 @@
-- * `global#foo`
-- * `foo#global.bar`
local globals = function(str)
- -- Handling globals from modules
- for modulename, fieldname in str:gmatch('([%a%.%d_]+)#global%.([%a%.%d_]+)') do
- local item = apimodel._item(fieldname)
- local file = apimodel._file()
- file.name = modulename
- file:addglobalvar( item )
- return item
- end
- -- Handling other globals
- for name in str:gmatch('global#([%a%.%d_]+)') do
- -- print("globale", name)
- return apimodel._externaltypref('global', name)
- end
- return nil
+ -- Handling globals from modules
+ for modulename, fieldname in str:gmatch('([%a%.%d_]+)#global%.([%a%.%d_]+)') do
+ local item = apimodel._item(fieldname)
+ local file = apimodel._file()
+ file.name = modulename
+ file:addglobalvar( item )
+ return item
+ end
+ -- Handling other globals
+ for name in str:gmatch('global#([%a%.%d_]+)') do
+ -- print("globale", name)
+ return apimodel._externaltypref('global', name)
+ end
+ return nil
end
---
-- Transform a string like `module#(type).field` in an API Model item
local field = function( str )
- -- Match `module#type.field`
- local mod, typename, fieldname = str:gmatch('([%a%.%d_]*)#([%a%.%d_]+)%.([%a%.%d_]+)')()
+ -- Match `module#type.field`
+ local mod, typename, fieldname = str:gmatch('([%a%.%d_]*)#([%a%.%d_]+)%.([%a%.%d_]+)')()
- -- Try matching `module#(type).field`
- if not mod then
- mod, typename, fieldname = str:gmatch('([%a%.%d_]*)#%(([%a%.%d_]+)%)%.([%a%.%d_]+)')()
- if not mod then
- -- No match
- return nil
- end
- end
+ -- Try matching `module#(type).field`
+ if not mod then
+ mod, typename, fieldname = str:gmatch('([%a%.%d_]*)#%(([%a%.%d_]+)%)%.([%a%.%d_]+)')()
+ if not mod then
+ -- No match
+ return nil
+ end
+ end
- -- Build according `item
- local modulefielditem = apimodel._item( fieldname )
- local moduletype = apimodel._recordtypedef(typename)
- moduletype:addfield( modulefielditem )
- local typeref
- if #mod > 0 then
- local modulefile = apimodel._file()
- modulefile.name = mod
- modulefile:addtype( moduletype )
- typeref = apimodel._externaltypref(mod, typename)
- modulefielditem.external = true
- else
- typeref = apimodel._internaltyperef(typename)
- end
- modulefielditem.type = typeref
- return modulefielditem
+ -- Build according `item
+ local modulefielditem = apimodel._item( fieldname )
+ local moduletype = apimodel._recordtypedef(typename)
+ moduletype:addfield( modulefielditem )
+ local typeref
+ if #mod > 0 then
+ local modulefile = apimodel._file()
+ modulefile.name = mod
+ modulefile:addtype( moduletype )
+ typeref = apimodel._externaltypref(mod, typename)
+ modulefielditem.external = true
+ else
+ typeref = apimodel._internaltyperef(typename)
+ end
+ modulefielditem.type = typeref
+ return modulefielditem
end
---
-- Build an API internal reference from a string like: `#typeref`
local internal = function ( typestring )
- for name in typestring:gmatch('#([%a%.%d_]+)') do
- -- Do not handle this name is it starts with reserved name "global"
- if name:find("global.") == 1 then return nil end
- return apimodel._internaltyperef(name)
- end
- return nil
+ for name in typestring:gmatch('#([%a%.%d_]+)') do
+ -- Do not handle this name is it starts with reserved name "global"
+ if name:find("global.") == 1 then return nil end
+ return apimodel._internaltyperef(name)
+ end
+ return nil
end
---
-- Build an API external reference from a string like: `mod.ule#type`
local extern = function (type)
- -- Match `mod.ule#ty.pe`
- local modulename, typename = type:gmatch('([%a%.%d_]+)#([%a%.%d_]+)')()
+ -- Match `mod.ule#ty.pe`
+ local modulename, typename = type:gmatch('([%a%.%d_]+)#([%a%.%d_]+)')()
- -- Trying `mod.ule#(ty.pe)`
- if not modulename then
- modulename, typename = type:gmatch('([%a%.%d_]+)#%(([%a%.%d_]+)%)')()
+ -- Trying `mod.ule#(ty.pe)`
+ if not modulename then
+ modulename, typename = type:gmatch('([%a%.%d_]+)#%(([%a%.%d_]+)%)')()
- -- No match at all
- if not modulename then
- return nil
- end
- end
- return apimodel._externaltypref(modulename, typename)
+ -- No match at all
+ if not modulename then
+ return nil
+ end
+ end
+ return apimodel._externaltypref(modulename, typename)
end
---
-- Build an API external reference from a string like: `mod.ule`
local file = function (type)
- for modulename in type:gmatch('([%a%.%d_]+)') do
- local file = apimodel._file()
- file.name = modulename
- return file
- end
- return nil
+ for modulename in type:gmatch('([%a%.%d_]+)') do
+ local file = apimodel._file()
+ file.name = modulename
+ return file
+ end
+ return nil
end
@@ -473,21 +473,21 @@
-- @usage local externaltyperef = getelement("somemodule#somefield")
function M.getelement( str )
- -- Order matters, more restrictive are at begin of table
- local extractors = {
- globals,
- field,
- extern,
- internal,
- file
- }
- -- Loop over extractors.
- -- First valid result is used
- for _, extractor in ipairs( extractors ) do
- local result = extractor( str )
- if result then return result end
- end
- return nil
+ -- Order matters, more restrictive are at begin of table
+ local extractors = {
+ globals,
+ field,
+ extern,
+ internal,
+ file
+ }
+ -- Loop over extractors.
+ -- First valid result is used
+ for _, extractor in ipairs( extractors ) do
+ local result = extractor( str )
+ if result then return result end
+ end
+ return nil
end
--------------------------------------------------------------------------------
@@ -497,14 +497,14 @@
-- @param t table to iterate.
-- @return iterator function.
function M.sortedpairs(t)
- local a = {}
- local insert = table.insert
- for n in pairs(t) do insert(a, n) end
- table.sort(a)
- local i = 0
- return function()
- i = i + 1
- return a[i], t[a[i]]
- end
+ local a = {}
+ local insert = table.insert
+ for n in pairs(t) do insert(a, n) end
+ table.sort(a)
+ local i = 0
+ return function()
+ i = i + 1
+ return a[i], t[a[i]]
+ end
end
return M
diff --git a/libraries/luadbgpclient/debugger/commands.lua b/libraries/luadbgpclient/debugger/commands.lua
index 6931a0f..2471d2c 100644
--- a/libraries/luadbgpclient/debugger/commands.lua
+++ b/libraries/luadbgpclient/debugger/commands.lua
@@ -11,7 +11,7 @@
-- Commands handlers for DBGp protocol.
-------------------------------------------------------------------------------
-- Debugger command functions. Each function handle a different command.
--- A command function is called with 3 arguments
+-- A command function is called with 3 arguments
-- 1. the debug session instance
-- 2. the command arguments as table
-- 3. the command data, if any
@@ -38,317 +38,317 @@
-- @param coro_id (string or nil) Coroutine identifier or nil (current coroutine)
-- @return Coroutine instance or nil (if coro_id was nil or if coroutine is the current coroutine)
local function get_coroutine(self, coro_id)
- if coro_id then
- local coro = dbgp.assert(399, core.active_coroutines.from_id[tonumber(coro_id)], "No such coroutine")
- dbgp.assert(399, coroutine.status(coro) ~= "dead", "Coroutine is dead")
- if coro ~= self.coro[1] then return util.ForeignThread(coro) end
- end
- return self.coro
+ if coro_id then
+ local coro = dbgp.assert(399, core.active_coroutines.from_id[tonumber(coro_id)], "No such coroutine")
+ dbgp.assert(399, coroutine.status(coro) ~= "dead", "Coroutine is dead")
+ if coro ~= self.coro[1] then return util.ForeignThread(coro) end
+ end
+ return self.coro
end
M["break"] = function(self, args)
- self.state = "break"
- -- send response to previous command
- core.previous_context_response(self)
- -- and then response to break command itself
- dbgp.send_xml(self.skt, { tag = "response", attr = { command = "break", transaction_id = args.i, success = 1 } } )
- return false
+ self.state = "break"
+ -- send response to previous command
+ core.previous_context_response(self)
+ -- and then response to break command itself
+ dbgp.send_xml(self.skt, { tag = "response", attr = { command = "break", transaction_id = args.i, success = 1 } } )
+ return false
end
function M.status(self, args)
- dbgp.send_xml(self.skt, { tag = "response", attr = {
- command = "status",
- reason = "ok",
- status = self.state,
- transaction_id = args.i } } )
+ dbgp.send_xml(self.skt, { tag = "response", attr = {
+ command = "status",
+ reason = "ok",
+ status = self.state,
+ transaction_id = args.i } } )
end
function M.stop(self, args)
- dbgp.send_xml(self.skt, { tag = "response", attr = {
- command = "stop",
- reason = "ok",
- status = "stopped",
- transaction_id = args.i } } )
- self.skt:close()
- os.exit(1)
+ dbgp.send_xml(self.skt, { tag = "response", attr = {
+ command = "stop",
+ reason = "ok",
+ status = "stopped",
+ transaction_id = args.i } } )
+ self.skt:close()
+ os.exit(1)
end
function M.feature_get(self, args)
- local name = args.n
- local response = util.features[name] or (not not M[name])
- dbgp.send_xml(self.skt, { tag = "response", attr = {
- command = "feature_get",
- feature_name = name,
- supported = response and "1" or "0",
- transaction_id = args.i },
- tostring(response) } )
+ local name = args.n
+ local response = util.features[name] or (not not M[name])
+ dbgp.send_xml(self.skt, { tag = "response", attr = {
+ command = "feature_get",
+ feature_name = name,
+ supported = response and "1" or "0",
+ transaction_id = args.i },
+ tostring(response) } )
end
function M.feature_set(self, args)
- local name, value = args.n, args.v
- local success = pcall(function() util.features[name] = value end)
- dbgp.send_xml(self.skt, { tag = "response", attr = {
- command = "feature_set",
- feature = name,
- success = success and 1 or 0,
- transaction_id = args.i
- } } )
+ local name, value = args.n, args.v
+ local success = pcall(function() util.features[name] = value end)
+ dbgp.send_xml(self.skt, { tag = "response", attr = {
+ command = "feature_set",
+ feature = name,
+ success = success and 1 or 0,
+ transaction_id = args.i
+ } } )
end
function M.typemap_get(self, args)
- local function gentype(name, type, xsdtype)
- return { tag = "map", atts = { name = name, type = type, ["xsi:type"] = xsdtype } }
- end
-
- dbgp.send_xml(self.skt, { tag = "response", attr = {
- command = "typemap_get",
- transaction_id = args.i,
- ["xmlns:xsi"] = "http://www.w3.org/2001/XMLSchema-instance",
- ["xmlns:xsd"] = "http://www.w3.org/2001/XMLSchema",
- },
- gentype("nil", "null"),
- gentype("boolean", "bool", "xsd:boolean"),
- gentype("number", "float", "xsd:float"),
- gentype("string", "string", "xsd:string"),
- gentype("function", "resource"),
- gentype("userdata", "resource"),
- gentype("thread", "resource"),
- gentype("table", "hash"),
- gentype("sequence", "array"), -- artificial type to represent sequences (1-n continuous indexes)
- gentype("multival", "array"), -- used to represent return values
- } )
+ local function gentype(name, type, xsdtype)
+ return { tag = "map", atts = { name = name, type = type, ["xsi:type"] = xsdtype } }
+ end
+
+ dbgp.send_xml(self.skt, { tag = "response", attr = {
+ command = "typemap_get",
+ transaction_id = args.i,
+ ["xmlns:xsi"] = "http://www.w3.org/2001/XMLSchema-instance",
+ ["xmlns:xsd"] = "http://www.w3.org/2001/XMLSchema",
+ },
+ gentype("nil", "null"),
+ gentype("boolean", "bool", "xsd:boolean"),
+ gentype("number", "float", "xsd:float"),
+ gentype("string", "string", "xsd:string"),
+ gentype("function", "resource"),
+ gentype("userdata", "resource"),
+ gentype("thread", "resource"),
+ gentype("table", "hash"),
+ gentype("sequence", "array"), -- artificial type to represent sequences (1-n continuous indexes)
+ gentype("multival", "array"), -- used to represent return values
+ } )
end
function M.run(self) return true end
function M.step_over(self)
- core.events.register("over")
- return true
+ core.events.register("over")
+ return true
end
function M.step_out(self)
- core.events.register("out")
- return true
+ core.events.register("out")
+ return true
end
function M.step_into(self)
- core.events.register("into")
- return true
+ core.events.register("into")
+ return true
end
function M.eval(self, args, data)
- log("DEBUG", "Going to eval "..data)
- local result, err, success
- local env = self.stack(self.coro, 0)
- -- first, try to load as expression
- -- DBGp does not support stack level here, see http://bugs.activestate.com/show_bug.cgi?id=81178
- local func, err = util.loadin("return "..data, env)
-
- -- if it is not an expression, try as statement (assignment, ...)
- if not func then
- func, err = util.loadin(data, env)
+ log("DEBUG", "Going to eval "..data)
+ local result, err, success
+ local env = self.stack(self.coro, 0)
+ -- first, try to load as expression
+ -- DBGp does not support stack level here, see http://bugs.activestate.com/show_bug.cgi?id=81178
+ local func, err = util.loadin("return "..data, env)
+
+ -- if it is not an expression, try as statement (assignment, ...)
+ if not func then
+ func, err = util.loadin(data, env)
+ end
+
+ if func then
+ success, result = pcall(function() return introspection.Multival(func()) end)
+ if not success then err = result end
+ end
+
+ local response = { tag = "response", attr = { command = "eval", transaction_id = args.i } }
+ if not err then
+ local nresults = result.n
+ if nresults == 1 then result = result[1] end
+
+ -- store result for further use (property_*)
+ -- TODO: this could be optimized: this is only used for Expressions view and totally useless for interactive console,
+ -- so storing result or not could be set by an argument
+ local idx
+ if nresults > 0 then
+ local cache = env[context.Context[-1]]
+ idx = #cache + 1
+ cache[idx] = result
end
-
- if func then
- success, result = pcall(function() return introspection.Multival(func()) end)
- if not success then err = result end
- end
-
- local response = { tag = "response", attr = { command = "eval", transaction_id = args.i } }
- if not err then
- local nresults = result.n
- if nresults == 1 then result = result[1] end
-
- -- store result for further use (property_*)
- -- TODO: this could be optimized: this is only used for Expressions view and totally useless for interactive console,
- -- so storing result or not could be set by an argument
- local idx
- if nresults > 0 then
- local cache = env[context.Context[-1]]
- idx = #cache + 1
- cache[idx] = result
- end
-
- -- As of Lua 5.1, the maximum stack size (and result count) is 8000, this limit is used to fit all results in one page
- response[1] = introspection.make_property(-1, result, idx or "", nil, 1, 8000, 0, nil)
- response.attr.success = 1
- else
- response.attr.success = 0
- response[1] = dbgp.make_error(206, err)
- end
- dbgp.send_xml(self.skt, response)
+
+ -- As of Lua 5.1, the maximum stack size (and result count) is 8000, this limit is used to fit all results in one page
+ response[1] = introspection.make_property(-1, result, idx or "", nil, 1, 8000, 0, nil)
+ response.attr.success = 1
+ else
+ response.attr.success = 0
+ response[1] = dbgp.make_error(206, err)
+ end
+ dbgp.send_xml(self.skt, response)
end
function M.breakpoint_set(self, args, data)
- if args.o and not core.breakpoints.hit_conditions[args.o] then dbgp.error(200, "Invalid hit_condition operator: "..args.o) end
-
- local filename, lineno = args.f, tonumber(args.n)
- local bp = {
- type = args.t,
- state = args.s or "enabled",
- temporary = args.r == "1", -- "0" or nil makes this property false
- hit_count = 0,
- filename = filename,
- lineno = lineno,
- hit_value = tonumber(args.h or 0),
- hit_condition = args.o or ">=",
- }
-
- if args.t == "conditional" then
- bp.expression = data
- -- the expression is compiled only once
- bp.condition = dbgp.assert(207, loadstring("return (" .. data .. ")"))
- elseif args.t ~= "line" then dbgp.error(201, "BP type " .. args.t .. " not yet supported") end
-
- local bpid = core.breakpoints.insert(bp)
- dbgp.send_xml(self.skt, { tag = "response", attr = { command = "breakpoint_set", transaction_id = args.i, state = bp.state, id = bpid } } )
+ if args.o and not core.breakpoints.hit_conditions[args.o] then dbgp.error(200, "Invalid hit_condition operator: "..args.o) end
+
+ local filename, lineno = args.f, tonumber(args.n)
+ local bp = {
+ type = args.t,
+ state = args.s or "enabled",
+ temporary = args.r == "1", -- "0" or nil makes this property false
+ hit_count = 0,
+ filename = filename,
+ lineno = lineno,
+ hit_value = tonumber(args.h or 0),
+ hit_condition = args.o or ">=",
+ }
+
+ if args.t == "conditional" then
+ bp.expression = data
+ -- the expression is compiled only once
+ bp.condition = dbgp.assert(207, loadstring("return (" .. data .. ")"))
+ elseif args.t ~= "line" then dbgp.error(201, "BP type " .. args.t .. " not yet supported") end
+
+ local bpid = core.breakpoints.insert(bp)
+ dbgp.send_xml(self.skt, { tag = "response", attr = { command = "breakpoint_set", transaction_id = args.i, state = bp.state, id = bpid } } )
end
function M.breakpoint_get(self, args)
- dbgp.send_xml(self.skt, { tag = "response",
- attr = { command = "breakpoint_get", transaction_id = args.i },
- dbgp.assert(205, core.breakpoints.get_xml(tonumber(args.d))) })
+ dbgp.send_xml(self.skt, { tag = "response",
+ attr = { command = "breakpoint_get", transaction_id = args.i },
+ dbgp.assert(205, core.breakpoints.get_xml(tonumber(args.d))) })
end
function M.breakpoint_list(self, args)
- local bps = { tag = "response", attr = { command = "breakpoint_list", transaction_id = args.i } }
- for id, bp in pairs(core.breakpoints.get()) do bps[#bps + 1] = core.breakpoints.get_xml(id) end
- dbgp.send_xml(self.skt, bps)
+ local bps = { tag = "response", attr = { command = "breakpoint_list", transaction_id = args.i } }
+ for id, bp in pairs(core.breakpoints.get()) do bps[#bps + 1] = core.breakpoints.get_xml(id) end
+ dbgp.send_xml(self.skt, bps)
end
function M.breakpoint_update(self, args)
- local bp = core.breakpoints.get(tonumber(args.d))
- if not bp then dbgp.error(205, "No such breakpint "..args.d) end
- if args.o and not core.breakpoints.hit_conditions[args.o] then dbgp.error(200, "Invalid hit_condition operator: "..args.o) end
-
- local response = { tag = "response", attr = { command = "breakpoint_update", transaction_id = args.i } }
- bp.state = args.s or bp.state
- bp.lineno = tonumber(args.n or bp.lineno)
- bp.hit_value = tonumber(args.h or bp.hit_value)
- bp.hit_condition = args.o or bp.hit_condition
- dbgp.send_xml(self.skt, response)
+ local bp = core.breakpoints.get(tonumber(args.d))
+ if not bp then dbgp.error(205, "No such breakpint "..args.d) end
+ if args.o and not core.breakpoints.hit_conditions[args.o] then dbgp.error(200, "Invalid hit_condition operator: "..args.o) end
+
+ local response = { tag = "response", attr = { command = "breakpoint_update", transaction_id = args.i } }
+ bp.state = args.s or bp.state
+ bp.lineno = tonumber(args.n or bp.lineno)
+ bp.hit_value = tonumber(args.h or bp.hit_value)
+ bp.hit_condition = args.o or bp.hit_condition
+ dbgp.send_xml(self.skt, response)
end
function M.breakpoint_remove(self, args)
- local response = { tag = "response", attr = { command = "breakpoint_remove", transaction_id = args.i } }
- if not core.breakpoints.remove(tonumber(args.d)) then dbgp.error(205, "No such breakpint "..args.d) end
- dbgp.send_xml(self.skt, response)
+ local response = { tag = "response", attr = { command = "breakpoint_remove", transaction_id = args.i } }
+ if not core.breakpoints.remove(tonumber(args.d)) then dbgp.error(205, "No such breakpint "..args.d) end
+ dbgp.send_xml(self.skt, response)
end
function M.stack_depth(self, args)
- local depth = 0
- local coro = get_coroutine(self, args.o)
- for level = 0, math.huge do
- local info = coro:getinfo(level, "St")
- if not info then break end -- end of stack
- depth = depth + 1
- if info.istailcall then depth = depth + 1 end -- a 'fake' level is added in that case
- if info.what == "main" then break end -- levels below main chunk are not interesting
- end
- dbgp.send_xml(self.skt, { tag = "response", attr = { command = "stack_depth", transaction_id = args.i, depth = depth} } )
+ local depth = 0
+ local coro = get_coroutine(self, args.o)
+ for level = 0, math.huge do
+ local info = coro:getinfo(level, "St")
+ if not info then break end -- end of stack
+ depth = depth + 1
+ if info.istailcall then depth = depth + 1 end -- a 'fake' level is added in that case
+ if info.what == "main" then break end -- levels below main chunk are not interesting
+ end
+ dbgp.send_xml(self.skt, { tag = "response", attr = { command = "stack_depth", transaction_id = args.i, depth = depth} } )
end
function M.stack_get(self, args) -- TODO: dynamic code
- -- special URIs to identify unreachable stack levels
- local what2uri = {
- tail = "tailreturn:/",
- C = "ccode:/",
- }
-
- local function make_level(info, level)
- local attr = { level = level, where = info.name, type="file" }
- local uri = platform.get_uri(info.source)
- if uri and info.currentline then -- reachable level
- attr.filename = uri
- attr.lineno = info.currentline
- else
- attr.filename = what2uri[info.what] or "unknown:/"
- attr.lineno = -1
- end
- return { tag = "stack", attr = attr }
+ -- special URIs to identify unreachable stack levels
+ local what2uri = {
+ tail = "tailreturn:/",
+ C = "ccode:/",
+ }
+
+local function make_level(info, level)
+ local attr = { level = level, where = info.name, type="file" }
+ local uri = platform.get_uri(info.source)
+ if uri and info.currentline then -- reachable level
+ attr.filename = uri
+ attr.lineno = info.currentline
+ else
+ attr.filename = what2uri[info.what] or "unknown:/"
+ attr.lineno = -1
+ end
+ return { tag = "stack", attr = attr }
+end
+
+local node = { tag = "response", attr = { command = "stack_get", transaction_id = args.i} }
+local coro = get_coroutine(self, args.o)
+
+if args.d then
+ local stack_level = tonumber(args.d)
+ node[#node+1] = make_level(coro:getinfo(stack_level, "nSl"), stack_level)
+else
+ for i=0, math.huge do
+ local info = coro:getinfo(i, "nSlt")
+ if not info then break end
+ node[#node+1] = make_level(info, i)
+ -- add a fake level of stack for tail calls (tells user that the function has not been called directly)
+ if info.istailcall then
+ node[#node+1] = { tag = "stack", attr = { level=i, type="file", filename="tailreturn:/", lineno=-1 } }
end
-
- local node = { tag = "response", attr = { command = "stack_get", transaction_id = args.i} }
- local coro = get_coroutine(self, args.o)
-
- if args.d then
- local stack_level = tonumber(args.d)
- node[#node+1] = make_level(coro:getinfo(stack_level, "nSl"), stack_level)
- else
- for i=0, math.huge do
- local info = coro:getinfo(i, "nSlt")
- if not info then break end
- node[#node+1] = make_level(info, i)
- -- add a fake level of stack for tail calls (tells user that the function has not been called directly)
- if info.istailcall then
- node[#node+1] = { tag = "stack", attr = { level=i, type="file", filename="tailreturn:/", lineno=-1 } }
- end
- if info.what == "main" then break end -- levels below main chunk are not interesting
- end
- end
-
- dbgp.send_xml(self.skt, node)
+ if info.what == "main" then break end -- levels below main chunk are not interesting
+ end
+end
+
+dbgp.send_xml(self.skt, node)
end
--- Lists all active coroutines.
--- Returns a list of active coroutines with their id (an arbitrary string) to query stack and properties. The id is
+-- Returns a list of active coroutines with their id (an arbitrary string) to query stack and properties. The id is
-- guaranteed to be unique and stable for all coroutine life (they can be reused as long as coroutine exists).
-- Others commands such as stack_get or property_* commands takes an additional -o switch to query a particular cOroutine.
-- If the switch is not given, running coroutine will be used.
-- In case of error on coroutines (most likely coroutine not found or dead), an error 399 is thrown.
-- Note there is an important limitation due to Lua 5.1 coroutine implementation: you cannot query main "coroutine" from
-- another one, so main coroutine is not in returned list (this will change with Lua 5.2).
---
+--
+-- This is a non-standard command. The returned XML has the following structure:
-- <response command="coroutine_list" transaction_id="0">
-- <coroutine name="<some printtable name>" id="<coroutine id>" running="0|1" />
-- ...
-- </response>
function M.coroutine_list(self, args)
- local running = self.coro[1]
- local coroutines = { tag = "response", attr = { command = "coroutine_list", transaction_id = args.i } }
- -- as any operation on main coroutine will fail, it is not yet listed
- -- coroutines[1] = { name = "coroutine", attr = { id = 0, name = "main", running = (running == nil) and "1" or "0" } }
- for id, coro in pairs(core.active_coroutines.from_id) do
- if id ~= "n" then
- coroutines[#coroutines + 1] = { tag = "coroutine", attr = { id = id, name = tostring(coro), running = (coro == running) and "1" or "0" } }
- end
+ local running = self.coro[1]
+ local coroutines = { tag = "response", attr = { command = "coroutine_list", transaction_id = args.i } }
+ -- as any operation on main coroutine will fail, it is not yet listed
+ -- coroutines[1] = { name = "coroutine", attr = { id = 0, name = "main", running = (running == nil) and "1" or "0" } }
+ for id, coro in pairs(core.active_coroutines.from_id) do
+ if id ~= "n" then
+ coroutines[#coroutines + 1] = { tag = "coroutine", attr = { id = id, name = tostring(coro), running = (coro == running) and "1" or "0" } }
end
- dbgp.send_xml(self.skt, coroutines)
+ end
+ dbgp.send_xml(self.skt, coroutines)
end
function M.context_names(self, args)
- local coro = get_coroutine(self, args.o)
- local level = tonumber(args.d or 0)
- local info = coro:getinfo(level, "f") or dbgp.error(301, "No such stack level "..tostring(level))
-
- -- All contexts are always passed, even if empty. This is how DLTK expect context, what about others ?
- local contexts = {
- tag = "response", attr = { command = "context_names", transaction_id = args.i },
- { tag = "context", attr = { name = "Local", id = 0 } },
- { tag = "context", attr = { name = "Upvalue", id = 2 } },
- { tag = "context", attr = { name = "Global", id = 1 } },
- }
-
- dbgp.send_xml(self.skt, contexts)
+ local coro = get_coroutine(self, args.o)
+ local level = tonumber(args.d or 0)
+ local info = coro:getinfo(level, "f") or dbgp.error(301, "No such stack level "..tostring(level))
+
+ -- All contexts are always passed, even if empty. This is how DLTK expects context, what about others ?
+ local contexts = {
+ tag = "response", attr = { command = "context_names", transaction_id = args.i },
+ { tag = "context", attr = { name = "Local", id = 0 } },
+ { tag = "context", attr = { name = "Upvalue", id = 2 } },
+ { tag = "context", attr = { name = "Global", id = 1 } },
+ }
+
+ dbgp.send_xml(self.skt, contexts)
end
function M.context_get(self, args)
- local cxt_num = tonumber(args.c or 0)
- local cxt_id = context.Context[cxt_num] or dbgp.error(302, "No such context: "..tostring(cxt_num))
- local level = tonumber(args.d or 0)
- local coro = get_coroutine(self, args.o)
- local cxt = self.stack(coro, level)
-
- local properties = { tag = "response", attr = { command = "context_get", transaction_id = args.i, context = context} }
- -- iteration over global is different (this could be unified in Lua 5.2 thanks to __pairs metamethod)
- for name, val in (cxt_num == 1 and next or getmetatable(cxt[cxt_id]).iterator), cxt[cxt_id], nil do
- -- the DBGp specification is not clear about the depth of a context_get, but a recursive get could be *really* slow in Lua
- properties[#properties + 1] = introspection.make_property(cxt_num, val, name, nil, 0, util.features.max_children, 0,
- util.features.max_data, cxt_num ~= 1)
- end
-
- dbgp.send_xml(self.skt, properties)
+ local cxt_num = tonumber(args.c or 0)
+ local cxt_id = context.Context[cxt_num] or dbgp.error(302, "No such context: "..tostring(cxt_num))
+ local level = tonumber(args.d or 0)
+ local coro = get_coroutine(self, args.o)
+ local cxt = self.stack(coro, level)
+
+ local properties = { tag = "response", attr = { command = "context_get", transaction_id = args.i, context = context} }
+ -- iteration over global is different (this could be unified in Lua 5.2 thanks to __pairs metamethod)
+ for name, val in (cxt_num == 1 and next or getmetatable(cxt[cxt_id]).iterator), cxt[cxt_id], nil do
+ -- the DBGp specification is not clear about the depth of a context_get, but a recursive get could be *really* slow in Lua
+ properties[#properties + 1] = introspection.make_property(cxt_num, val, name, nil, 0, util.features.max_children, 0,
+ util.features.max_data, cxt_num ~= 1)
+ end
+
+ dbgp.send_xml(self.skt, properties)
end
-------------------------------------------------------------------------------
@@ -358,104 +358,104 @@
-- It notably contain a collection of proxy table which handle transparentely get/set operations on special fields
-- and the cache of complex keys.
local property_evaluation_environment = {
- key_cache = introspection.key_cache,
- metatable = setmetatable({ }, {
- __index = function(self, tbl) return getmetatable(tbl) end,
- __newindex = function(self, tbl, mt) return setmetatable(tbl, mt) end,
- }),
- environment = util.eval_env,
+ key_cache = introspection.key_cache,
+ metatable = setmetatable({ }, {
+ __index = function(self, tbl) return getmetatable(tbl) end,
+ __newindex = function(self, tbl, mt) return setmetatable(tbl, mt) end,
+ }),
+ environment = util.eval_env,
}
-- to allows to be set as metatable
property_evaluation_environment.__index = property_evaluation_environment
function M.property_get(self, args)
- --TODO BUG ECLIPSE TOOLSLINUX-99 352316
- local cxt_num, name = assert(util.unb64(args.n):match("^(%-?%d+)|(.*)$"))
- cxt_num = tonumber(args.c or cxt_num)
- local cxt_id = context.Context[cxt_num] or dbgp.error(302, "No such context: "..tostring(cxt_num))
- local level = tonumber(args.d or 0)
- local coro = get_coroutine(self, args.o)
- local size = tonumber(args.m or util.features.max_data)
- if size < 0 then size = nil end -- call from property_value
- local page = tonumber(args.p or 0)
- local cxt = self.stack(coro, level)
- local chunk = dbgp.assert(206, util.loadin("return "..name, property_evaluation_environment))
- local prop = select(2, dbgp.assert(300, pcall(chunk, cxt[cxt_id])))
- local response = introspection.make_property(cxt_num, prop, name, name, util.features.max_depth, util.features.max_children, page, size)
- -- make_property is not able to flag special variables as such when they are at root of property
- -- special variables queries are in the form "<proxy name>[(...)[a][b]<...>]"
- -- TODO: such parsing is far from perfect
- if name:match("^[%w_]+%[.-%b[]%]$") == name then response.attr.type = "special" end
- dbgp.send_xml(self.skt, { tag = "response",
- attr = { command = "property_get", transaction_id = args.i, context = context},
- response } )
+ --TODO BUG ECLIPSE TOOLSLINUX-99 352316
+ local cxt_num, name = assert(util.unb64(args.n):match("^(%-?%d+)|(.*)$"))
+ cxt_num = tonumber(args.c or cxt_num)
+ local cxt_id = context.Context[cxt_num] or dbgp.error(302, "No such context: "..tostring(cxt_num))
+ local level = tonumber(args.d or 0)
+ local coro = get_coroutine(self, args.o)
+ local size = tonumber(args.m or util.features.max_data)
+ if size < 0 then size = nil end -- call from property_value
+ local page = tonumber(args.p or 0)
+ local cxt = self.stack(coro, level)
+ local chunk = dbgp.assert(206, util.loadin("return "..name, property_evaluation_environment))
+ local prop = select(2, dbgp.assert(300, pcall(chunk, cxt[cxt_id])))
+ local response = introspection.make_property(cxt_num, prop, name, name, util.features.max_depth, util.features.max_children, page, size)
+ -- make_property is not able to flag special variables as such when they are at root of property
+ -- special variables queries are in the form "<proxy name>[(...)[a][b]<...>]"
+ -- TODO: such parsing is far from perfect
+ if name:match("^[%w_]+%[.-%b[]%]$") == name then response.attr.type = "special" end
+ dbgp.send_xml(self.skt, { tag = "response",
+ attr = { command = "property_get", transaction_id = args.i, context = context},
+ response } )
end
function M.property_value(self, args)
- args.m = -1
- M.property_get(self, args)
+ args.m = -1
+ M.property_get(self, args)
end
function M.property_set(self, args, data)
- local cxt_num, name = assert(util.unb64(args.n):match("^(%-?%d+)|(.*)$"))
- cxt_num = tonumber(args.c or cxt_num)
- local cxt_id = context.Context[cxt_num] or dbgp.error(302, "No such context: "..tostring(cxt_num))
- local level = tonumber(args.d or 0)
- local coro = get_coroutine(self, args.o)
- local cxt = self.stack(coro, level)
-
- -- evaluate the new value in the local context
- local value = select(2, dbgp.assert(206, pcall(dbgp.assert(206, util.loadin("return "..data, cxt)))))
-
- local chunk = dbgp.assert(206, util.loadin(name .. " = value", setmetatable({ value = value }, property_evaluation_environment)))
- dbgp.assert(206, pcall(chunk, cxt[cxt_id]))
- dbgp.send_xml(self.skt, { tag = "response", attr = { success = 1, transaction_id = args.i } } )
+ local cxt_num, name = assert(util.unb64(args.n):match("^(%-?%d+)|(.*)$"))
+ cxt_num = tonumber(args.c or cxt_num)
+ local cxt_id = context.Context[cxt_num] or dbgp.error(302, "No such context: "..tostring(cxt_num))
+ local level = tonumber(args.d or 0)
+ local coro = get_coroutine(self, args.o)
+ local cxt = self.stack(coro, level)
+
+ -- evaluate the new value in the local context
+ local value = select(2, dbgp.assert(206, pcall(dbgp.assert(206, util.loadin("return "..data, cxt)))))
+
+ local chunk = dbgp.assert(206, util.loadin(name .. " = value", setmetatable({ value = value }, property_evaluation_environment)))
+ dbgp.assert(206, pcall(chunk, cxt[cxt_id]))
+ dbgp.send_xml(self.skt, { tag = "response", attr = { success = 1, transaction_id = args.i } } )
end
--TODO dynamic code handling
-- The DBGp specification is not clear about the line number meaning, this implementation is 1-based and numbers are inclusive
function M.source(self, args)
- local path
- if args.f then
- path = platform.get_path(args.f)
- else
- path = self.coro:getinfo(0, "S").source
- assert(path:sub(1,1) == "@")
- path = path:sub(2)
- end
- local file, err = io.open(path)
- if not file then dbgp.error(100, err, { success = 0 }) end
- -- Try to identify compiled files
- if file:read(1) == "\033" then dbgp.error(100, args.f.." is bytecode", { success = 0 }) end
- file:seek("set", 0)
-
-
- local srclines = { }
- local beginline, endline, currentline = tonumber(args.b or 0), tonumber(args.e or math.huge), 0
- for line in file:lines() do
- currentline = currentline + 1
- if currentline >= beginline and currentline <= endline then
- srclines[#srclines + 1] = line
- elseif currentline >= endline then break end
- end
- file:close()
- srclines[#srclines + 1] = "" -- to add a trailing \n
-
- dbgp.send_xml(self.skt, { tag = "response",
- attr = { command = "source", transaction_id = args.i, success = 1},
- util.b64(table.concat(srclines, "\n")) })
+ local path
+ if args.f then
+ path = platform.get_path(args.f)
+ else
+ path = self.coro:getinfo(0, "S").source
+ assert(path:sub(1,1) == "@")
+ path = path:sub(2)
+ end
+ local file, err = io.open(path)
+ if not file then dbgp.error(100, err, { success = 0 }) end
+ -- Try to identify compiled files
+ if file:read(1) == "\033" then dbgp.error(100, args.f.." is bytecode", { success = 0 }) end
+ file:seek("set", 0)
+
+
+ local srclines = { }
+ local beginline, endline, currentline = tonumber(args.b or 0), tonumber(args.e or math.huge), 0
+ for line in file:lines() do
+ currentline = currentline + 1
+ if currentline >= beginline and currentline <= endline then
+ srclines[#srclines + 1] = line
+ elseif currentline >= endline then break end
+ end
+ file:close()
+ srclines[#srclines + 1] = "" -- to add a trailing \n
+
+ dbgp.send_xml(self.skt, { tag = "response",
+ attr = { command = "source", transaction_id = args.i, success = 1},
+ util.b64(table.concat(srclines, "\n")) })
end
-- Factory for both stdout and stderr commands, change file descriptor in io
local function output_command_handler_factory(mode)
- return function(self, args)
- if args.c == "0" then -- disable
- io[mode] = io.base[mode]
- else
- io[mode] = setmetatable({ skt = self.skt, mode = mode }, args.c == "1" and core.copy_output or core.redirect_output)
- end
- dbgp.send_xml(self.skt, { tag = "response", attr = { command = mode, transaction_id = args.i, success = "1" } } )
+ return function(self, args)
+ if args.c == "0" then -- disable
+ io[mode] = io.base[mode]
+ else
+ io[mode] = setmetatable({ skt = self.skt, mode = mode }, args.c == "1" and core.copy_output or core.redirect_output)
end
+ dbgp.send_xml(self.skt, { tag = "response", attr = { command = mode, transaction_id = args.i, success = "1" } } )
+ end
end
M.stdout = output_command_handler_factory("stdout")
diff --git a/libraries/luadbgpclient/debugger/context.lua b/libraries/luadbgpclient/debugger/context.lua
index 701828b..d345c92 100644
--- a/libraries/luadbgpclient/debugger/context.lua
+++ b/libraries/luadbgpclient/debugger/context.lua
@@ -21,18 +21,18 @@
local getglobals
if _VERSION == "Lua 5.1" then
- getglobals = function(f) return getfenv(f) end
+ getglobals = function(f) return getfenv(f) end
elseif _VERSION == "Lua 5.2" then
- getglobals = function(f, cxt)
- -- 'global' environment: this is either the local _ENV or upvalue _ENV. A special case happen when a
- -- function does not reference any global variable: the upvalue _ENV may not exist at all. In this case,
- -- global environment is not relevant so it is fixed to an empty table. Another solution would be to set it
- -- to the environment from above stack level but it would require some overhead (especially if multiple
- -- levels must be instantiated)
- if cxt[LOCAL][STORE]["_ENV"] then return cxt[LOCAL]["_ENV"]
- elseif cxt[UPVAL][STORE]["_ENV"] then return cxt[UPVAL]["_ENV"]
- else return { } end
- end
+ getglobals = function(f, cxt)
+ -- 'global' environment: this is either the local _ENV or upvalue _ENV. A special case happen when a
+ -- function does not reference any global variable: the upvalue _ENV may not exist at all. In this case,
+ -- global environment is not relevant so it is fixed to an empty table. Another solution would be to set it
+ -- to the environment from above stack level but it would require some overhead (especially if multiple
+ -- levels must be instantiated)
+ if cxt[LOCAL][STORE]["_ENV"] then return cxt[LOCAL]["_ENV"]
+ elseif cxt[UPVAL][STORE]["_ENV"] then return cxt[UPVAL]["_ENV"]
+ else return { } end
+ end
end
--- Captures variables for given stack level. The capture contains local, upvalues and global variables.
@@ -41,140 +41,140 @@
-- The individual local and upvalues context are also available and can be queried and modified with indexed notation too.
-- These objects are NOT persistant and must not be used outside the debugger loop which instanciated them !
M.Context = {
- -- Context identifiers can be accessed by their DBGp context ID
- [0] = LOCAL,
- [1] = GLOBAL, -- DLTK internal ID for globals is 1
- [2] = UPVAL,
- -- EVAL is used to keep results from eval in cache in order to browse or modify them, results are stored as sequence
- [-1] = EVAL,
- STORE = STORE,
-
- -- gets a variable by name with correct handling of Lua scope chain
- -- the or chain does not work here beacause __index metamethod would raise an error instead of returning nil
+ -- Context identifiers can be accessed by their DBGp context ID
+ [0] = LOCAL,
+ [1] = GLOBAL, -- DLTK internal ID for globals is 1
+ [2] = UPVAL,
+ -- EVAL is used to keep results from eval in cache in order to browse or modify them, results are stored as sequence
+ [-1] = EVAL,
+ STORE = STORE,
+
+ -- gets a variable by name with correct handling of Lua scope chain
+ -- the or chain does not work here beacause __index metamethod would raise an error instead of returning nil
+ __index = function(self, k)
+ if self[LOCAL][STORE][k] then return self[LOCAL][k]
+ elseif self[UPVAL][STORE][k] then return self[UPVAL][k]
+ else return self[GLOBAL][k] end
+ end,
+ __newindex = function(self, k, v)
+ if self[LOCAL][STORE][k] then self[LOCAL][k] = v
+ elseif self[UPVAL][STORE][k] then self[UPVAL][k] = v
+ else self[GLOBAL][k] = v end
+ end,
+
+ -- debug only !!
+ __tostring = function(self)
+ local buf = { "Locals: \n" }
+ for k,v in pairs(self[LOCAL][STORE]) do
+ buf[#buf+1] = "\t"..tostring(k).."("..tostring(v)..")="..tostring(self[LOCAL][k]).."\n"
+ end
+ buf[#buf+1] = "Upvalues: \n"
+ for k,v in pairs(self[UPVAL][STORE]) do
+ buf[#buf+1] = "\t"..tostring(k).."("..tostring(v)..")="..tostring(self[UPVAL][k]).."\n"
+ end
+ return table.concat(buf)
+ end,
+
+ LocalContext = {
__index = function(self, k)
- if self[LOCAL][STORE][k] then return self[LOCAL][k]
- elseif self[UPVAL][STORE][k] then return self[UPVAL][k]
- else return self[GLOBAL][k] end
+ local index = self[STORE][k]
+ if not index then error("The local "..tostring(k).." does not exists.") end
+ local handle = self[HANDLE]
+ return select(2, handle.coro:getlocal(handle.level, index))
end,
__newindex = function(self, k, v)
- if self[LOCAL][STORE][k] then self[LOCAL][k] = v
- elseif self[UPVAL][STORE][k] then self[UPVAL][k] = v
- else self[GLOBAL][k] = v end
+ local index = self[STORE][k]
+ if index then
+ local handle = self[HANDLE]
+ handle.coro:setlocal(handle.level, index, v)
+ else error("Cannot set local " .. k) end
end,
-
- -- debug only !!
- __tostring = function(self)
- local buf = { "Locals: \n" }
- for k,v in pairs(self[LOCAL][STORE]) do
- buf[#buf+1] = "\t"..tostring(k).."("..tostring(v)..")="..tostring(self[LOCAL][k]).."\n"
- end
- buf[#buf+1] = "Upvalues: \n"
- for k,v in pairs(self[UPVAL][STORE]) do
- buf[#buf+1] = "\t"..tostring(k).."("..tostring(v)..")="..tostring(self[UPVAL][k]).."\n"
- end
- return table.concat(buf)
+ -- Lua 5.2 ready :)
+ --__pairs = function(self) return getmetatable(self).iterator, self, nil end,
+ iterator = function(self, prev)
+ local key, index = next(self[STORE], prev)
+ if key then return key, self[key] else return nil end
end,
-
- LocalContext = {
- __index = function(self, k)
- local index = self[STORE][k]
- if not index then error("The local "..tostring(k).." does not exists.") end
- local handle = self[HANDLE]
- return select(2, handle.coro:getlocal(handle.level, index))
- end,
- __newindex = function(self, k, v)
- local index = self[STORE][k]
- if index then
- local handle = self[HANDLE]
- handle.coro:setlocal(handle.level, index, v)
- else error("Cannot set local " .. k) end
- end,
- -- Lua 5.2 ready :)
- --__pairs = function(self) return getmetatable(self).iterator, self, nil end,
- iterator = function(self, prev)
- local key, index = next(self[STORE], prev)
- if key then return key, self[key] else return nil end
- end,
- },
-
- UpvalContext = {
- __index = function(self, k)
- local index = self[STORE][k]
- if not index then error("The local "..tostring(k).." does not exitsts.") end
- return select(2, debug.getupvalue(self[HANDLE], index))
- end,
- __newindex = function(self, k, v)
- local index = self[STORE][k]
- if index then debug.setupvalue(self[HANDLE], index, v)
- else error("Cannot set upvalue " .. k) end
- end,
- -- Lua 5.2 ready :)
- -- __pairs = function(self) return getmetatable(self).iterator, self, nil end,
- iterator = function(self, prev)
- local key, index = next(self[STORE], prev)
- if key then return key, self[key] else return nil end
- end,
- },
-
- --- Context constructor
- -- @param coro (util.*Thread instance) coroutine to map to
- -- @param level (number) stack level do dump (script stack level)
- new = function(cls, coro, level)
- local locals, upvalues = {}, {}
- if level < 0 then dbgp.error(301, "No such stack level: "..tostring(level)) end
- local func = (coro:getinfo(level, "f") or dbgp.error(301, "No such stack level: "..tostring(level))).func
-
- -- local variables
- for i=1, math.huge do
- local name, val = coro:getlocal(level, i)
- if not name then break
- elseif name:sub(1,1) ~= "(" then -- skip internal values
- locals[name] = i
- end
- end
-
- -- upvalues
- for i=1, math.huge do
- local name, val = debug.getupvalue(func, i)
- if not name then break end
- upvalues[name] = i
- end
-
- locals = setmetatable({ [STORE] = locals, [HANDLE] = { level = level, coro = coro } }, cls.LocalContext)
- upvalues = setmetatable({ [STORE] = upvalues, [HANDLE] = func }, cls.UpvalContext)
-
- local result = setmetatable({ [LOCAL] = locals, [UPVAL] = upvalues, [EVAL] = {} }, cls)
- rawset(result, GLOBAL, getglobals(func, result))
- return result
+ },
+
+ UpvalContext = {
+ __index = function(self, k)
+ local index = self[STORE][k]
+ if not index then error("The local "..tostring(k).." does not exitsts.") end
+ return select(2, debug.getupvalue(self[HANDLE], index))
end,
+ __newindex = function(self, k, v)
+ local index = self[STORE][k]
+ if index then debug.setupvalue(self[HANDLE], index, v)
+ else error("Cannot set upvalue " .. k) end
+ end,
+ -- Lua 5.2 ready :)
+ -- __pairs = function(self) return getmetatable(self).iterator, self, nil end,
+ iterator = function(self, prev)
+ local key, index = next(self[STORE], prev)
+ if key then return key, self[key] else return nil end
+ end,
+ },
+
+ --- Context constructor
+ -- @param coro (util.*Thread instance) coroutine to map to
+ -- @param level (number) stack level do dump (script stack level)
+ new = function(cls, coro, level)
+ local locals, upvalues = {}, {}
+ if level < 0 then dbgp.error(301, "No such stack level: "..tostring(level)) end
+ local func = (coro:getinfo(level, "f") or dbgp.error(301, "No such stack level: "..tostring(level))).func
+
+ -- local variables
+ for i=1, math.huge do
+ local name, val = coro:getlocal(level, i)
+ if not name then break
+ elseif name:sub(1,1) ~= "(" then -- skip internal values
+ locals[name] = i
+ end
+ end
+
+ -- upvalues
+ for i=1, math.huge do
+ local name, val = debug.getupvalue(func, i)
+ if not name then break end
+ upvalues[name] = i
+ end
+
+ locals = setmetatable({ [STORE] = locals, [HANDLE] = { level = level, coro = coro } }, cls.LocalContext)
+ upvalues = setmetatable({ [STORE] = upvalues, [HANDLE] = func }, cls.UpvalContext)
+
+ local result = setmetatable({ [LOCAL] = locals, [UPVAL] = upvalues, [EVAL] = {} }, cls)
+ rawset(result, GLOBAL, getglobals(func, result))
+ return result
+ end,
}
---- Handle caching of all instantiated context.
--- Returns a function which takes 2 parameters: thread and stack level and returns the corresponding context. If this
--- context has been already queried there is no new instantiation. A ContextManager is valid only during the debug loop
--- on which it has been instantiated. References to a ContextManager must be lost after the end of debug loop (so
+--- Handle caching of all instantiated context.
+-- Returns a function which takes 2 parameters: thread and stack level and returns the corresponding context. If this
+-- context has been already queried there is no new instantiation. A ContextManager is valid only during the debug loop
+-- on which it has been instantiated. References to a ContextManager must be lost after the end of debug loop (so
-- threads can be collected).
-- If a context cannot be instantiated, an 301 DBGP error is thrown.
function M.ContextManager()
- local cache = { }
- return function(thread, level)
- -- the real coroutine is used as key (not the wrapped instance as its unicity is not guaranteed)
- -- otherwise, true is used to identify current thread (as nil is not a valid table key)
- local key = thread[1] or true
- local thread_contexts = cache[key]
- if not thread_contexts then
- thread_contexts = { }
- cache[key] = thread_contexts
- end
-
- local context = thread_contexts[level]
- if not context then
- context = M.Context:new(thread, level)
- thread_contexts[level] = context
- end
-
- return context
+ local cache = { }
+ return function(thread, level)
+ -- the real coroutine is used as key (not the wrapped instance as its unicity is not guaranteed)
+ -- otherwise, true is used to identify current thread (as nil is not a valid table key)
+ local key = thread[1] or true
+ local thread_contexts = cache[key]
+ if not thread_contexts then
+ thread_contexts = { }
+ cache[key] = thread_contexts
end
+
+ local context = thread_contexts[level]
+ if not context then
+ context = M.Context:new(thread, level)
+ thread_contexts[level] = context
+ end
+
+ return context
+ end
end
return M
diff --git a/libraries/luadbgpclient/debugger/dbgp.lua b/libraries/luadbgpclient/debugger/dbgp.lua
index 72482af..7b1d953 100644
--- a/libraries/luadbgpclient/debugger/dbgp.lua
+++ b/libraries/luadbgpclient/debugger/dbgp.lua
@@ -13,8 +13,8 @@
local util = require "debugger.util"
-local error, setmetatable, type, pairs, ipairs, tostring, tconcat =
- error, setmetatable, type, pairs, ipairs, tostring, table.concat
+local error, setmetatable, type, pairs, ipairs, tostring, tconcat =
+ error, setmetatable, type, pairs, ipairs, tostring, table.concat
local M = { }
@@ -23,11 +23,11 @@
-- @param cmd_args (string) sequence of arguments
-- @return table described above
function M.arg_parse(cmd_args)
- local args = {}
- for arg, val in cmd_args:gmatch("%-(%w) (%S+)") do
- args[arg] = val
- end
- return args
+ local args = {}
+ for arg, val in cmd_args:gmatch("%-(%w) (%S+)") do
+ args[arg] = val
+ end
+ return args
end
--- Parses a command line
@@ -35,26 +35,26 @@
-- @retrun arguments (table)
-- @return data (string, optional)
function M.cmd_parse(cmd)
- local cmd_name, args, data
- if cmd:find("--", 1, true) then -- there is a data part
- cmd_name, args, data = cmd:match("^(%S+)%s+(.*)%s+%-%-%s*(.*)$")
- data = util.unb64(data)
- else
- cmd_name, args = cmd:match("^(%S+)%s+(.*)$")
- end
- return cmd_name, M.arg_parse(args), data
+ local cmd_name, args, data
+ if cmd:find("--", 1, true) then -- there is a data part
+ cmd_name, args, data = cmd:match("^(%S+)%s+(.*)%s+%-%-%s*(.*)$")
+ data = util.unb64(data)
+ else
+ cmd_name, args = cmd:match("^(%S+)%s+(.*)$")
+ end
+ return cmd_name, M.arg_parse(args), data
end
--- Returns the packet read from socket, or nil followed by an error message on errors.
function M.read_packet(skt)
- local size = {}
- while true do
- local byte, err = skt:receive(1)
- if not byte then return nil, err end
- if byte == "\000" then break end
- size[#size+1] = byte
- end
- return tconcat(size)
+ local size = {}
+ while true do
+ local byte, err = skt:receive(1)
+ if not byte then return nil, err end
+ if byte == "\000" then break end
+ size[#size+1] = byte
+ end
+ return tconcat(size)
end
M.DBGP_ERR_METATABLE = {} -- unique object used to identify DBGp errors
@@ -66,7 +66,7 @@
-- @param message message string (optional)
-- @param attr extra attributes to add to the response tag (optional)
function M.error(code, message, attr)
- error(setmetatable({ code = code, message = message, attr = attr or {} }, M.DBGP_ERR_METATABLE), 2)
+ error(setmetatable({ code = code, message = message, attr = attr or {} }, M.DBGP_ERR_METATABLE), 2)
end
--- Like core assert but throws a DBGp error if condition is not met.
@@ -74,8 +74,8 @@
-- @param message condition to test
-- @param ... will be used as error message if test fails.
function M.assert(code, success, ...)
- if not success then M.error(code, (...)) end
- return success, ...
+ if not success then M.error(code, (...)) end
+ return success, ...
end
-- -----------------
@@ -86,43 +86,43 @@
-- Generates a XML string from a Lua Object Model (LOM) table.
-- See http://matthewwild.co.uk/projects/luaexpat/lom.html
function M.lom2str(xml)
- local pieces = { } -- string buffer
+ local pieces = { } -- string buffer
- local function generate(node)
- pieces[#pieces + 1] = "<"..node.tag
+ local function generate(node)
+ pieces[#pieces + 1] = "<"..node.tag
+ pieces[#pieces + 1] = " "
+ -- attribute ordering is not honored here
+ for attr, val in pairs(node.attr or {}) do
+ if type(attr) == "string" then
+ pieces[#pieces + 1] = attr .. '="' .. tostring(val):gsub('["&<]', xmlattr_specialchars) .. '"'
pieces[#pieces + 1] = " "
- -- attribute ordering is not honored here
- for attr, val in pairs(node.attr or {}) do
- if type(attr) == "string" then
- pieces[#pieces + 1] = attr .. '="' .. tostring(val):gsub('["&<]', xmlattr_specialchars) .. '"'
- pieces[#pieces + 1] = " "
- end
- end
- pieces[#pieces] = nil -- remove the last separator (useless)
-
- if node[1] then
- pieces[#pieces + 1] = ">"
- for _, child in ipairs(node) do
- if type(child) == "table" then generate(child)
- else pieces[#pieces + 1] = "<![CDATA[" .. tostring(child) .. "]]>" end
- end
- pieces[#pieces + 1] = "</" .. node.tag .. ">"
- else
- pieces[#pieces + 1] = "/>"
- end
+ end
end
-
- generate(xml)
- return tconcat(pieces)
+ pieces[#pieces] = nil -- remove the last separator (useless)
+
+ if node[1] then
+ pieces[#pieces + 1] = ">"
+ for _, child in ipairs(node) do
+ if type(child) == "table" then generate(child)
+ else pieces[#pieces + 1] = "<![CDATA[" .. tostring(child) .. "]]>" end
+ end
+ pieces[#pieces + 1] = "</" .. node.tag .. ">"
+ else
+ pieces[#pieces + 1] = "/>"
+ end
+ end
+
+ generate(xml)
+ return tconcat(pieces)
end
function M.send_xml(skt, resp)
- if not resp.attr then resp.attr = {} end
- resp.attr.xmlns = "urn:debugger_protocol_v1"
-
- local data = '<?xml version="1.0" encoding="UTF-8" ?>\n'..M.lom2str(resp)
- util.log("DEBUG", "Send " .. data)
- skt:send(tostring(#data).."\000"..data.."\000")
+ if not resp.attr then resp.attr = {} end
+ resp.attr.xmlns = "urn:debugger_protocol_v1"
+
+ local data = '<?xml version="1.0" encoding="UTF-8" ?>\n'..M.lom2str(resp)
+ util.log("DEBUG", "Send " .. data)
+ skt:send(tostring(#data).."\000"..data.."\000")
end
--- Return an XML tag describing a debugger error, with an optional message
@@ -130,11 +130,11 @@
-- @param msg (string, optional) textual description of error
-- @return table, suitable to be converted into XML
function M.make_error(code, msg)
- local elem = { tag = "error", attr = { code = code } }
- if msg then
- elem[1] = { tostring(msg), tag = "message" }
- end
- return elem
+ local elem = { tag = "error", attr = { code = code } }
+ if msg then
+ elem[1] = { tostring(msg), tag = "message" }
+ end
+ return elem
end
return M
diff --git a/libraries/luadbgpclient/debugger/init.lua b/libraries/luadbgpclient/debugger/init.lua
index 90caf5a..a56ef5c 100644
--- a/libraries/luadbgpclient/debugger/init.lua
+++ b/libraries/luadbgpclient/debugger/init.lua
@@ -13,7 +13,7 @@
local debug = require "debug"
--- To avoid cyclic dependency, internal state of the debugger that must be accessed
+-- To avoid cyclic dependency, internal state of the debugger that must be accessed
-- elsewhere (in commands most likely) will be stored in a fake module "debugger.core"
local core = { }
package.loaded["debugger.core"] = core
@@ -46,65 +46,65 @@
-- "BEGIN VERSION DEPENDENT CODE"
local setbpenv -- set environment of a breakpoint (compiled function)
if _VERSION == "Lua 5.1" then
- local setfenv = setfenv
- setbpenv = setfenv
+ local setfenv = setfenv
+ setbpenv = setfenv
elseif _VERSION == "Lua 5.2" then
- local setupvalue = debug.setupvalue
- -- _ENV is the first upvalue
- setbpenv = function(f, t) return setupvalue(f, 1, t) end
+ local setupvalue = debug.setupvalue
+ -- _ENV is the first upvalue
+ setbpenv = function(f, t) return setupvalue(f, 1, t) end
else error(_VERSION .. "is not supported.") end
-- "END VERSION DEPENDENT CODE"
-------------------------------------------------------------------------------
-- Output redirection handling
-------------------------------------------------------------------------------
--- Override standard output functions & constants to redirect data written to these files to IDE too.
+-- Override standard output functions & constants to redirect data written to these files to IDE too.
-- This works only for output done in Lua, output written by C extensions is still go to system output file.
-- references to native values
io.base = { output = io.output, stdin = io.stdin, stdout = io.stdout, stderr = io.stderr }
function print(...)
- local buf = {...}
- for i=1, select("#", ...) do
- buf[i] = tostring(buf[i])
- end
- io.stdout:write(table.concat(buf, "\t") .. "\n")
+ local buf = {...}
+ for i=1, select("#", ...) do
+ buf[i] = tostring(buf[i])
+ end
+ io.stdout:write(table.concat(buf, "\t") .. "\n")
end
-- Actually change standard output file but still return the "fake" stdout
function io.output(output)
- io.base.output(output)
- return io.stdout
+ io.base.output(output)
+ return io.stdout
end
local dummy = function() end
-- metatable for redirecting output (not printed at all in actual output)
core.redirect_output = {
- write = function(self, ...)
- local buf = {...}
- for i=1, select("#", ...) do buf[i] = tostring(buf[i]) end
- buf = table.concat(buf):gsub("\n", "\r\n")
- dbgp.send_xml(self.skt, { tag = "stream", attr = { type=self.mode }, util.b64(buf) } )
- end,
- flush = dummy,
- close = dummy,
- setvbuf = dummy,
- seek = dummy
+ write = function(self, ...)
+ local buf = {...}
+ for i=1, select("#", ...) do buf[i] = tostring(buf[i]) end
+ buf = table.concat(buf):gsub("\n", "\r\n")
+ dbgp.send_xml(self.skt, { tag = "stream", attr = { type=self.mode }, util.b64(buf) } )
+ end,
+ flush = dummy,
+ close = dummy,
+ setvbuf = dummy,
+ seek = dummy
}
core.redirect_output.__index = core.redirect_output
-- metatable for cloning output (outputs to actual system and send to IDE)
core.copy_output = {
- write = function(self, ...)
- core.redirect_output.write(self, ...)
- io.base[self.mode]:write(...)
- end,
- flush = function(self, ...) return self.out:flush(...) end,
- close = function(self, ...) return self.out:close(...) end,
- setvbuf = function(self, ...) return self.out:setvbuf(...) end,
- seek = function(self, ...) return self.out:seek(...) end,
+ write = function(self, ...)
+ core.redirect_output.write(self, ...)
+ io.base[self.mode]:write(...)
+ end,
+ flush = function(self, ...) return self.out:flush(...) end,
+ close = function(self, ...) return self.out:close(...) end,
+ setvbuf = function(self, ...) return self.out:setvbuf(...) end,
+ seek = function(self, ...) return self.out:seek(...) end,
}
core.copy_output.__index = core.copy_output
@@ -118,172 +118,172 @@
-- require that multiple BP at same place must be handled)
-- A BP is a table with all additional properties (type, condition, ...) the id is the string representation of the table.
core.breakpoints = {
- -- functions to call to match hit conditions
- hit_conditions = {
- [">="] = function(value, target) return value >= target end,
- ["=="] = function(value, target) return value == target end,
- ["%"] = function(value, target) return (value % target) == 0 end,
- }
+ -- functions to call to match hit conditions
+ hit_conditions = {
+ [">="] = function(value, target) return value >= target end,
+ ["=="] = function(value, target) return value == target end,
+ ["%"] = function(value, target) return (value % target) == 0 end,
+ }
}
-- tracks events such as step_into or step_over
core.events = { }
do
- local file_mapping = { }
- local id_mapping = { }
- local waiting_sessions = { } -- sessions that wait for an event (over, into, out)
- local step_into = nil -- session that registered a step_into event, if any
- local sequence = 0 -- used to generate breakpoint IDs
+ local file_mapping = { }
+ local id_mapping = { }
+ local waiting_sessions = { } -- sessions that wait for an event (over, into, out)
+ local step_into = nil -- session that registered a step_into event, if any
+ local sequence = 0 -- used to generate breakpoint IDs
- --- Inserts a new breakpoint into registry
- -- @param bp (table) breakpoint data
- -- @param uri (string, optional) Absolute file URI, for line breakpoints
- -- @param line (number, optional) Line where breakpoint stops, for line breakpoints
- -- @return breakpoint identifier
- function core.breakpoints.insert(bp)
- local bpid = sequence
- sequence = bpid + 1
- bp.id = bpid
- -- re-encode the URI to avoid any mismatch (with authority for example)
- local uri = url.parse(bp.filename)
- bp.filename = url.build{ scheme=uri.scheme, authority="", path=platform.normalize(uri.path)}
-
- local filereg = file_mapping[bp.filename]
- if not filereg then
- filereg = { }
- file_mapping[bp.filename] = filereg
- end
-
- local linereg = filereg[bp.lineno]
- if not linereg then
- linereg = {}
- filereg[bp.lineno] = linereg
- end
-
- table.insert(linereg, bp)
-
- id_mapping[bpid] = bp
- return bpid
+ --- Inserts a new breakpoint into registry
+ -- @param bp (table) breakpoint data
+ -- @param uri (string, optional) Absolute file URI, for line breakpoints
+ -- @param line (number, optional) Line where breakpoint stops, for line breakpoints
+ -- @return breakpoint identifier
+ function core.breakpoints.insert(bp)
+ local bpid = sequence
+ sequence = bpid + 1
+ bp.id = bpid
+ -- re-encode the URI to avoid any mismatch (with authority for example)
+ local uri = url.parse(bp.filename)
+ bp.filename = url.build{ scheme=uri.scheme, authority="", path=platform.normalize(uri.path)}
+
+ local filereg = file_mapping[bp.filename]
+ if not filereg then
+ filereg = { }
+ file_mapping[bp.filename] = filereg
end
- --- If breakpoint(s) exists for given file/line, uptates breakpoint counters
- -- and returns whether a breakpoint has matched (boolean)
- function core.breakpoints.at(file, line)
- local bps = file_mapping[file] and file_mapping[file][line]
- if not bps then return nil end
-
- local do_break = false
- for _, bp in pairs(bps) do
- if bp.state == "enabled" then
- local match = true
- if bp.condition then
- -- TODO: this is not the optimal solution because Context can be instantiated twice if the breakpoint matches
- local cxt = context.Context:new(active_session.coro, 0)
- setbpenv(bp.condition, cxt)
- local success, result = pcall(bp.condition)
- if not success then log("ERROR", "Condition evaluation failed for breakpoint at %s:%d: %s", file, line, result) end
- -- debugger always stops if an error occurs
- match = (not success) or result
- end
- if match then
- bp.hit_count = bp.hit_count + 1
- if core.breakpoints.hit_conditions[bp.hit_condition](bp.hit_count, bp.hit_value) then
- if bp.temporary then
- core.breakpoints.remove(bp.id)
- end
- do_break = true
- -- there is no break to handle multiple breakpoints: all hit counts must be updated
- end
- end
+ local linereg = filereg[bp.lineno]
+ if not linereg then
+ linereg = {}
+ filereg[bp.lineno] = linereg
+ end
+
+ table.insert(linereg, bp)
+
+ id_mapping[bpid] = bp
+ return bpid
+ end
+
+ --- If breakpoint(s) exists for given file/line, uptates breakpoint counters
+ -- and returns whether a breakpoint has matched (boolean)
+ function core.breakpoints.at(file, line)
+ local bps = file_mapping[file] and file_mapping[file][line]
+ if not bps then return nil end
+
+ local do_break = false
+ for _, bp in pairs(bps) do
+ if bp.state == "enabled" then
+ local match = true
+ if bp.condition then
+ -- TODO: this is not the optimal solution because Context can be instantiated twice if the breakpoint matches
+ local cxt = context.Context:new(active_session.coro, 0)
+ setbpenv(bp.condition, cxt)
+ local success, result = pcall(bp.condition)
+ if not success then log("ERROR", "Condition evaluation failed for breakpoint at %s:%d: %s", file, line, result) end
+ -- debugger always stops if an error occurs
+ match = (not success) or result
+ end
+ if match then
+ bp.hit_count = bp.hit_count + 1
+ if core.breakpoints.hit_conditions[bp.hit_condition](bp.hit_count, bp.hit_value) then
+ if bp.temporary then
+ core.breakpoints.remove(bp.id)
end
+ do_break = true
+ -- there is no break to handle multiple breakpoints: all hit counts must be updated
+ end
end
- return do_break
+ end
+ end
+ return do_break
+ end
+
+ function core.breakpoints.get(id)
+ if id then return id_mapping[id]
+ else return id_mapping end
+ end
+
+ function core.breakpoints.remove(id)
+ local bp = id_mapping[id]
+ if bp then
+ id_mapping[id] = nil
+ local linereg = file_mapping[bp.filename][bp.lineno]
+ for i=1, #linereg do
+ if linereg[i] == bp then
+ table.remove(linereg, i)
+ break
+ end
+ end
+
+ -- cleanup file_mapping
+ if not next(linereg) then file_mapping[bp.filename][bp.lineno] = nil end
+ if not next(file_mapping[bp.filename]) then file_mapping[bp.filename] = nil end
+ return true
+ end
+ return false
+ end
+
+ --- Returns an XML data structure that describes given breakpoint
+ -- @param id (number) breakpoint ID
+ -- @return Table describing a <breakpooint> tag or nil followed by an error message
+ function core.breakpoints.get_xml(id)
+ local bp = id_mapping[id]
+ if not bp then return nil, "No such breakpoint: "..tostring(id) end
+
+ local response = { tag = "breakpoint", attr = { } }
+ for k,v in pairs(bp) do response.attr[k] = v end
+ if bp.expression then
+ response[1] = { tag = "expression", bp.expression }
end
- function core.breakpoints.get(id)
- if id then return id_mapping[id]
- else return id_mapping end
- end
+ -- internal use only
+ response.attr.expression = nil
+ response.attr.condition = nil
+ response.attr.temporary = nil -- TODO: the specification is not clear whether this should be provided, see other implementations
+ return response
+ end
- function core.breakpoints.remove(id)
- local bp = id_mapping[id]
- if bp then
- id_mapping[id] = nil
- local linereg = file_mapping[bp.filename][bp.lineno]
- for i=1, #linereg do
- if linereg[i] == bp then
- table.remove(linereg, i)
- break
- end
- end
-
- -- cleanup file_mapping
- if not next(linereg) then file_mapping[bp.filename][bp.lineno] = nil end
- if not next(file_mapping[bp.filename]) then file_mapping[bp.filename] = nil end
- return true
- end
- return false
+ --- Register an event to be triggered.
+ -- @param event event name to register (must be "over", "out" or "into")
+ function core.events.register(event)
+ local thread = active_session.coro[1]
+ log("DEBUG", "Registered %s event for %s (%d)", event, tostring(thread), stack_levels[thread])
+ if event == "into" then
+ step_into = true
+ else
+ waiting_sessions[thread] = { event, stack_levels[thread] }
end
-
- --- Returns an XML data structure that describes given breakpoint
- -- @param id (number) breakpoint ID
- -- @return Table describing a <breakpooint> tag or nil followed by an error message
- function core.breakpoints.get_xml(id)
- local bp = id_mapping[id]
- if not bp then return nil, "No such breakpoint: "..tostring(id) end
-
- local response = { tag = "breakpoint", attr = { } }
- for k,v in pairs(bp) do response.attr[k] = v end
- if bp.expression then
- response[1] = { tag = "expression", bp.expression }
- end
-
- -- internal use only
- response.attr.expression = nil
- response.attr.condition = nil
- response.attr.temporary = nil -- TODO: the specification is not clear whether this should be provided, see other implementations
- return response
- end
-
- --- Register an event to be triggered.
- -- @param event event name to register (must be "over", "out" or "into")
- function core.events.register(event)
- local thread = active_session.coro[1]
- log("DEBUG", "Registered %s event for %s (%d)", event, tostring(thread), stack_levels[thread])
- if event == "into" then
- step_into = true
- else
- waiting_sessions[thread] = { event, stack_levels[thread] }
- end
- end
+ end
- --- Returns if an event (step into, over, out) is triggered.
- -- Does *not* discard events (even if they match) as event must be discarded manually if a breakpoint match before anyway.
- -- @return true if an event has matched, false otherwise
- function core.events.does_match()
- if step_into then return true end
-
- local thread = active_session.coro[1]
- local event = waiting_sessions[thread]
- if event then
- local event_type, target_level = unpack(event)
- local current_level = stack_levels[thread]
+ --- Returns if an event (step into, over, out) is triggered.
+ -- Does *not* discard events (even if they match) as event must be discarded manually if a breakpoint match before anyway.
+ -- @return true if an event has matched, false otherwise
+ function core.events.does_match()
+ if step_into then return true end
- if (event_type == "over" and current_level <= target_level) or -- step over
- (event_type == "out" and current_level < target_level) then -- step out
- log("DEBUG", "Event %s matched!", event_type)
- return true
- end
- end
- return false
+ local thread = active_session.coro[1]
+ local event = waiting_sessions[thread]
+ if event then
+ local event_type, target_level = unpack(event)
+ local current_level = stack_levels[thread]
+
+ if (event_type == "over" and current_level <= target_level) or -- step over
+ (event_type == "out" and current_level < target_level) then -- step out
+ log("DEBUG", "Event %s matched!", event_type)
+ return true
+ end
end
-
- --- Discards event for current thread (if any)
- function core.events.discard()
- waiting_sessions[active_session.coro[1]] = nil
- step_into = nil
- end
+ return false
+ end
+
+ --- Discards event for current thread (if any)
+ function core.events.discard()
+ waiting_sessions[active_session.coro[1]] = nil
+ step_into = nil
+ end
end
-------------------------------------------------------------------------------
@@ -292,287 +292,287 @@
--- Send the XML response to the previous continuation command and clear the previous context
function core.previous_context_response(self, reason)
- self.previous_context.status = self.state
- self.previous_context.reason = reason or "ok"
- dbgp.send_xml(self.skt, { tag = "response", attr = self.previous_context } )
- self.previous_context = nil
+ self.previous_context.status = self.state
+ self.previous_context.reason = reason or "ok"
+ dbgp.send_xml(self.skt, { tag = "response", attr = self.previous_context } )
+ self.previous_context = nil
end
local function cleanup()
- coroutine.resume, coroutine.wrap = coresume, cowrap
- for _, coro in pairs(core.active_coroutines.from_id) do
- debug.sethook(coro)
- end
- -- to remove hook on the main coroutine, it must be the current one (otherwise, this is a no-op) and this function
- -- have to be called adain later on the main thread to finish cleaup
- debug.sethook()
- core.active_coroutines.from_id, core.active_coroutines.from_coro = { }, { }
+ coroutine.resume, coroutine.wrap = coresume, cowrap
+ for _, coro in pairs(core.active_coroutines.from_id) do
+ debug.sethook(coro)
+ end
+ -- to remove hook on the main coroutine, it must be the current one (otherwise, this is a no-op) and this function
+ -- have to be called again later on the main thread to finish cleanup
+ debug.sethook()
+ core.active_coroutines.from_id, core.active_coroutines.from_coro = { }, { }
end
--- This function handles the debugger commands while the execution is paused. This does not use coroutines because there is no
-- way to get main coro in Lua 5.1 (only in 5.2)
local function debugger_loop(self, async_packet)
- self.skt:settimeout(nil) -- set socket blocking
-
- -- in async mode, the debugger does not wait for another command before continuing and does not modify previous_context
- local async_mode = async_packet ~= nil
-
- if self.previous_context and not async_mode then
- self.state = "break"
- core.previous_context_response(self)
- end
- self.stack = context.ContextManager(self.coro) -- will be used to mutualize context allocation for each loop
-
- while true do
- -- reads packet
- local packet = async_packet or dbgp.read_packet(self.skt)
- if not packet then
- log("WARNING", "lost debugger connection")
- cleanup()
- break
- end
+ self.skt:settimeout(nil) -- set socket blocking
- async_packet = nil
- log("DEBUG", packet)
- local cmd, args, data = dbgp.cmd_parse(packet)
-
- -- FIXME: command such as continuations sent in async mode could lead both engine and IDE in inconsistent state :
- -- make a blacklist/whitelist of forbidden or allowed commands in async ?
- -- invoke function
- local func = commands[cmd]
- if func then
- local ok, cont = xpcall(function() return func(self, args, data) end, debug.traceback)
- if not ok then -- internal exception
- local code, msg, attr
- if type(cont) == "table" and getmetatable(cont) == dbgp.DBGP_ERR_METATABLE then
- code, msg, attr = cont.code, cont.message, cont.attr
- else
- code, msg, attr = 998, tostring(cont), { }
- end
- log("ERROR", "Command %s caused: (%d) %s", cmd, code, tostring(msg))
- attr.command, attr.transaction_id = cmd, args.i
- dbgp.send_xml(self.skt, { tag = "response", attr = attr, dbgp.make_error(code, msg) } )
- elseif cont then
- self.previous_context = { command = cmd, transaction_id = args.i }
- break
- elseif cont == nil and async_mode then
- break
- elseif cont == false then -- In case of commands that fully resumes debugger loop, the mode is sync
- async_mode = false
- end
- else
- log("Got unknown command: "..cmd)
- dbgp.send_xml(self.skt, { tag = "response", attr = { command = cmd, transaction_id = args.i, }, dbgp.make_error(4) } )
- end
+ -- in async mode, the debugger does not wait for another command before continuing and does not modify previous_context
+ local async_mode = async_packet ~= nil
+
+ if self.previous_context and not async_mode then
+ self.state = "break"
+ core.previous_context_response(self)
+ end
+ self.stack = context.ContextManager(self.coro) -- will be used to mutualize context allocation for each loop
+
+ while true do
+ -- reads packet
+ local packet = async_packet or dbgp.read_packet(self.skt)
+ if not packet then
+ log("WARNING", "lost debugger connection")
+ cleanup()
+ break
end
-
- self.stack = nil -- free allocated contexts
- self.state = "running"
- self.skt:settimeout(0) -- reset socket to async
+
+ async_packet = nil
+ log("DEBUG", packet)
+ local cmd, args, data = dbgp.cmd_parse(packet)
+
+ -- FIXME: command such as continuations sent in async mode could lead both engine and IDE in inconsistent state :
+ -- make a blacklist/whitelist of forbidden or allowed commands in async ?
+ -- invoke function
+ local func = commands[cmd]
+ if func then
+ local ok, cont = xpcall(function() return func(self, args, data) end, debug.traceback)
+ if not ok then -- internal exception
+ local code, msg, attr
+ if type(cont) == "table" and getmetatable(cont) == dbgp.DBGP_ERR_METATABLE then
+ code, msg, attr = cont.code, cont.message, cont.attr
+ else
+ code, msg, attr = 998, tostring(cont), { }
+ end
+ log("ERROR", "Command %s caused: (%d) %s", cmd, code, tostring(msg))
+ attr.command, attr.transaction_id = cmd, args.i
+ dbgp.send_xml(self.skt, { tag = "response", attr = attr, dbgp.make_error(code, msg) } )
+ elseif cont then
+ self.previous_context = { command = cmd, transaction_id = args.i }
+ break
+ elseif cont == nil and async_mode then
+ break
+ elseif cont == false then -- In case of commands that fully resumes debugger loop, the mode is sync
+ async_mode = false
+ end
+ else
+ log("Got unknown command: "..cmd)
+ dbgp.send_xml(self.skt, { tag = "response", attr = { command = cmd, transaction_id = args.i, }, dbgp.make_error(4) } )
+ end
+ end
+
+ self.stack = nil -- free allocated contexts
+ self.state = "running"
+ self.skt:settimeout(0) -- reset socket to async
end
-- Stack handling can be pretty complex sometimes, especially with LuaJIT (as tail-call optimization are
--- more aggressive as stock Lua). So all debugger stuff is done in another coroutine, which leave the program
+-- more aggressive as stock Lua). So all debugger stuff is done in another coroutine, which leaves the program
-- stack in a clean state and allow faster and clearer stack operations (no need to remove all debugger calls
-- from stack for each operation).
--- However, this does not always work with stock Lua 5.1 as the main coroutine cannot be referenced
+-- However, this does not always work with stock Lua 5.1 as the main coroutine cannot be referenced
-- (coroutine.running() return nil). For this particular case, the debugger loop is started on the top of
-- program stack and every stack operation is relative the the hook level (see MainThread in util.lua).
local function line_hook(line)
- local do_break, packet = nil, nil
- local info = active_session.coro:getinfo(0, "S")
- local uri = platform.get_uri(info.source)
- if uri and uri ~= debugger_uri and uri ~= transportmodule_uri then -- the debugger does not break if the source is not known
- do_break = core.breakpoints.at(uri, line) or core.events.does_match()
- if do_break then
- core.events.discard()
- end
-
- -- check for async commands
- if not do_break then
- packet = dbgp.read_packet(active_session.skt)
- if packet then do_break = true end
- end
- end
-
+ local do_break, packet = nil, nil
+ local info = active_session.coro:getinfo(0, "S")
+ local uri = platform.get_uri(info.source)
+ if uri and uri ~= debugger_uri and uri ~= transportmodule_uri then -- the debugger does not break if the source is not known
+ do_break = core.breakpoints.at(uri, line) or core.events.does_match()
if do_break then
- local success, err = pcall(debugger_loop, active_session, packet)
- if not success then log("ERROR", "Error while debug loop: "..err) end
+ core.events.discard()
end
+
+ -- check for async commands
+ if not do_break then
+ packet = dbgp.read_packet(active_session.skt)
+ if packet then do_break = true end
+ end
+ end
+
+ if do_break then
+ local success, err = pcall(debugger_loop, active_session, packet)
+ if not success then log("ERROR", "Error while debug loop: "..err) end
+ end
end
local line_hook_coro = cocreate(function(line)
- while true do
- line_hook(line)
- line = coyield()
- end
+ while true do
+ line_hook(line)
+ line = coyield()
+ end
end)
local function debugger_hook(event, line)
- local thread = corunning() or "main"
- if event == "call" then
- stack_levels[thread] = stack_levels[thread] + 1
- elseif event == "tail call" then
- -- tail calls has no effects on stack handling: it is only used only for step commands but a such even does not
- -- interfere with any of them
- elseif event == "return" or event == "tail return" then
- stack_levels[thread] = stack_levels[thread] - 1
- else -- line event: check for breakpoint
- active_session.coro = util.CurrentThread(corunning())
- if active_session.coro[1] == "main" then
- line_hook(line)
- else
- -- run the debugger loop in another thread on the other cases (simplifies stack handling)
- assert(coresume(line_hook_coro, line))
- end
- active_session.coro = nil
+ local thread = corunning() or "main"
+ if event == "call" then
+ stack_levels[thread] = stack_levels[thread] + 1
+ elseif event == "tail call" then
+ -- tail calls have no effect on stack handling: they are used only for step commands, but such an event does not
+ -- interfere with any of them
+ elseif event == "return" or event == "tail return" then
+ stack_levels[thread] = stack_levels[thread] - 1
+ else -- line event: check for breakpoint
+ active_session.coro = util.CurrentThread(corunning())
+ if active_session.coro[1] == "main" then
+ line_hook(line)
+ else
+ -- run the debugger loop in another thread on the other cases (simplifies stack handling)
+ assert(coresume(line_hook_coro, line))
end
+ active_session.coro = nil
+ end
end
if rawget(_G, "jit") then
- debugger_hook = function(event, line)
- local thread = corunning() or "main"
- if event == "call" then
- if debug.getinfo(2, "S").what == "C" then return end
- stack_levels[thread] = stack_levels[thread] + 1
- elseif event == "return" or event == "tail return" then
- -- Return hooks are not called for tail calls in JIT (but unlike 5.2 there is no way to know whether a call is tail or not).
- -- So the only reliable way to know stack depth is to walk it.
- local depth = 2
- -- TODO: find the fastest way to call getinfo ('what' parameter)
- while debug.getinfo(depth, "f") do depth = depth + 1 end
- stack_levels[thread] = depth - 2
- elseif event == "line" then
- active_session.coro = util.CurrentThread(corunning())
- if active_session.coro[1] == "main" then
- line_hook(line)
- else
- -- run the debugger loop in another thread on the other cases (simplifies stack handling)
- assert(coresume(line_hook_coro, line))
- end
- active_session.coro = nil
- end
+ debugger_hook = function(event, line)
+ local thread = corunning() or "main"
+ if event == "call" then
+ if debug.getinfo(2, "S").what == "C" then return end
+ stack_levels[thread] = stack_levels[thread] + 1
+ elseif event == "return" or event == "tail return" then
+ -- Return hooks are not called for tail calls in JIT (but unlike 5.2 there is no way to know whether a call is tail or not).
+ -- So the only reliable way to know stack depth is to walk it.
+ local depth = 2
+ -- TODO: find the fastest way to call getinfo ('what' parameter)
+ while debug.getinfo(depth, "f") do depth = depth + 1 end
+ stack_levels[thread] = depth - 2
+ elseif event == "line" then
+ active_session.coro = util.CurrentThread(corunning())
+ if active_session.coro[1] == "main" then
+ line_hook(line)
+ else
+ -- run the debugger loop in another thread on the other cases (simplifies stack handling)
+ assert(coresume(line_hook_coro, line))
+ end
+ active_session.coro = nil
end
+ end
end
local function init(host, port, idekey, transport, executionplatform, workingdirectory)
- -- get connection data
- local host = host or os.getenv "DBGP_IDEHOST" or "127.0.0.1"
- local port = port or os.getenv "DBGP_IDEPORT" or "10000"
- local idekey = idekey or os.getenv("DBGP_IDEKEY") or "luaidekey"
+ -- get connection data
+ local host = host or os.getenv "DBGP_IDEHOST" or "127.0.0.1"
+ local port = port or os.getenv "DBGP_IDEPORT" or "10000"
+ local idekey = idekey or os.getenv("DBGP_IDEKEY") or "luaidekey"
- -- init plaform module
- local executionplatform = executionplatform or os.getenv("DBGP_PLATFORM") or nil
- local workingdirectory = workingdirectory or os.getenv("DBGP_WORKINGDIR") or nil
- platform.init(executionplatform,workingdirectory)
+ -- init platform module
+ local executionplatform = executionplatform or os.getenv("DBGP_PLATFORM") or nil
+ local workingdirectory = workingdirectory or os.getenv("DBGP_WORKINGDIR") or nil
+ platform.init(executionplatform,workingdirectory)
- -- get transport layer
- local transportpath = transport or os.getenv("DBGP_TRANSPORT") or "debugger.transport.luasocket"
- local transport = require(transportpath)
+ -- get transport layer
+ local transportpath = transport or os.getenv("DBGP_TRANSPORT") or "debugger.transport.luasocket"
+ local transport = require(transportpath)
- -- install base64 functions into util
- util.b64, util.rawb64, util.unb64 = transport.b64, transport.rawb64, transport.unb64
+ -- install base64 functions into util
+ util.b64, util.rawb64, util.unb64 = transport.b64, transport.rawb64, transport.unb64
- local skt = assert(transport.create())
- skt:settimeout(nil)
+ local skt = assert(transport.create())
+ skt:settimeout(nil)
- -- try to connect several times: if IDE launches both process and server at same time, first connect attempts may fail
- local ok, err
- print(string.format("Debugger v%s", DBGP_CLIENT_VERSION))
- print(string.format("Debugger: Trying to connect to %s:%s ... ", host, port))
- ok, err = skt:connect(host, port)
- for i=1, 4 do
- if ok then
- print("Debugger: Connection succeed.")
- break
- else
- -- wait
- transport.sleep(0.5)
- -- then retry.
- print(string.format("Debugger: Retrying to connect to %s:%s ... ", host, port))
- ok, err = skt:connect(host, port)
- end
+ -- try to connect several times: if IDE launches both process and server at same time, first connect attempts may fail
+ local ok, err
+ print(string.format("Debugger v%s", DBGP_CLIENT_VERSION))
+ print(string.format("Debugger: Trying to connect to %s:%s ... ", host, port))
+ ok, err = skt:connect(host, port)
+ for i=1, 4 do
+ if ok then
+ print("Debugger: Connection succeed.")
+ break
+ else
+ -- wait
+ transport.sleep(0.5)
+ -- then retry.
+ print(string.format("Debugger: Retrying to connect to %s:%s ... ", host, port))
+ ok, err = skt:connect(host, port)
end
- if err then error(string.format("Cannot connect to %s:%d : %s", host, port, err)) end
+ end
+ if err then error(string.format("Cannot connect to %s:%d : %s", host, port, err)) end
- -- get the debugger and transport layer URI
- debugger_uri = platform.get_uri(debug.getinfo(1).source)
- transportmodule_uri = platform.get_uri(debug.getinfo(transport.create).source)
+ -- get the debugger and transport layer URI
+ debugger_uri = platform.get_uri(debug.getinfo(1).source)
+ transportmodule_uri = platform.get_uri(debug.getinfo(transport.create).source)
- -- get the root script path (the highest possible stack index)
- local source
- for i=2, math.huge do
- local info = debug.getinfo(i)
- if not info then break end
- source = platform.get_uri(info.source) or source
+ -- get the root script path (the highest possible stack index)
+ local source
+ for i=2, math.huge do
+ local info = debug.getinfo(i)
+ if not info then break end
+ source = platform.get_uri(info.source) or source
+ end
+ if not source then source = "unknown:/" end -- when loaded before actual script (with a command line switch)
+
+ -- generate some kind of thread identifier
+ local thread = corunning() or "main"
+ stack_levels[thread] = 1 -- the return event will set the counter to 0
+ local sessionid = tostring(os.time()) .. "_" .. tostring(thread)
+
+ dbgp.send_xml(skt, { tag = "init", attr = {
+ appid = "Lua DBGp",
+ idekey = idekey,
+ session = sessionid,
+ thread = tostring(thread),
+ parent = "",
+ language = "Lua",
+ protocol_version = "1.0",
+ fileuri = source
+ } })
+
+ --FIXME util.CurrentThread(corunning) => util.CurrentThread(corunning()) WHAT DOES IT FIX ??
+ local sess = { skt = skt, state = "starting", id = sessionid, coro = util.CurrentThread(corunning) }
+ active_session = sess
+ debugger_loop(sess)
+
+ -- set debug hooks
+ debug.sethook(debugger_hook, "rlc")
+
+ -- install coroutine collecting functions.
+ -- TODO: maintain a list of *all* coroutines can be overkill (for example, the ones created by copcall), make a extension point to
+ -- customize debugged coroutines
+ -- coroutines are referenced during their first resume (so we are sure that they always have a stack frame)
+ local function resume_handler(coro, ...)
+ if costatus(coro) == "dead" then
+ local coro_id = core.active_coroutines.from_coro[coro]
+ core.active_coroutines.from_id[coro_id] = nil
+ core.active_coroutines.from_coro[coro] = nil
+ stack_levels[coro] = nil
end
- if not source then source = "unknown:/" end -- when loaded before actual script (with a command line switch)
+ return ...
+ end
- -- generate some kind of thread identifier
- local thread = corunning() or "main"
- stack_levels[thread] = 1 -- the return event will set the counter to 0
- local sessionid = tostring(os.time()) .. "_" .. tostring(thread)
-
- dbgp.send_xml(skt, { tag = "init", attr = {
- appid = "Lua DBGp",
- idekey = idekey,
- session = sessionid,
- thread = tostring(thread),
- parent = "",
- language = "Lua",
- protocol_version = "1.0",
- fileuri = source
- } })
-
- --FIXME util.CurrentThread(corunning) => util.CurrentThread(corunning()) WHAT DOES IT FIXES ??
- local sess = { skt = skt, state = "starting", id = sessionid, coro = util.CurrentThread(corunning) }
- active_session = sess
- debugger_loop(sess)
-
- -- set debug hooks
- debug.sethook(debugger_hook, "rlc")
-
- -- install coroutine collecting functions.
- -- TODO: maintain a list of *all* coroutines can be overkill (for example, the ones created by copcall), make a extension point to
- -- customize debugged coroutines
- -- coroutines are referenced during their first resume (so we are sure that they always have a stack frame)
- local function resume_handler(coro, ...)
- if costatus(coro) == "dead" then
- local coro_id = core.active_coroutines.from_coro[coro]
- core.active_coroutines.from_id[coro_id] = nil
- core.active_coroutines.from_coro[coro] = nil
- stack_levels[coro] = nil
- end
- return ...
+ function coroutine.resume(coro, ...)
+ if not stack_levels[coro] then
+ -- first time referenced
+ stack_levels[coro] = 0
+ core.active_coroutines.n = core.active_coroutines.n + 1
+ core.active_coroutines.from_id[core.active_coroutines.n] = coro
+ core.active_coroutines.from_coro[coro] = core.active_coroutines.n
+ debug.sethook(coro, debugger_hook, "rlc")
end
-
- function coroutine.resume(coro, ...)
- if not stack_levels[coro] then
- -- first time referenced
- stack_levels[coro] = 0
- core.active_coroutines.n = core.active_coroutines.n + 1
- core.active_coroutines.from_id[core.active_coroutines.n] = coro
- core.active_coroutines.from_coro[coro] = core.active_coroutines.n
- debug.sethook(coro, debugger_hook, "rlc")
- end
- return resume_handler(coro, coresume(coro, ...))
- end
-
- -- coroutine.wrap uses directly C API for coroutines and does not trigger our overridden coroutine.resume
- -- so this is an implementation of wrap in pure Lua
- local function wrap_handler(status, ...)
- if not status then error((...)) end
- return ...
- end
+ return resume_handler(coro, coresume(coro, ...))
+ end
- function coroutine.wrap(f)
- local coro = coroutine.create(f)
- return function(...)
- return wrap_handler(coroutine.resume(coro, ...))
- end
- end
+ -- coroutine.wrap uses directly C API for coroutines and does not trigger our overridden coroutine.resume
+ -- so this is an implementation of wrap in pure Lua
+ local function wrap_handler(status, ...)
+ if not status then error((...)) end
+ return ...
+ end
- return sess
+ function coroutine.wrap(f)
+ local coro = coroutine.create(f)
+ return function(...)
+ return wrap_handler(coroutine.resume(coro, ...))
+ end
+ end
+
+ return sess
end
return init
diff --git a/libraries/luadbgpclient/debugger/introspection.lua b/libraries/luadbgpclient/debugger/introspection.lua
index 6c7e0c8..36fb928 100644
--- a/libraries/luadbgpclient/debugger/introspection.lua
+++ b/libraries/luadbgpclient/debugger/introspection.lua
@@ -16,7 +16,7 @@
local util = require "debugger.util"
local tostring, type, assert, next, rawget, getmetatable, setmetatable, getfenv, select, coyield, cocreate, costatus, coresume, sformat, tconcat =
- tostring, type, assert, next, rawget, getmetatable, setmetatable, getfenv, select, coroutine.yield, coroutine.create, coroutine.status, coroutine.resume, string.format, table.concat
+ tostring, type, assert, next, rawget, getmetatable, setmetatable, getfenv, select, coroutine.yield, coroutine.create, coroutine.status, coroutine.resume, string.format, table.concat
local MULTIVAL_MT = { __tostring = function() return "" end }
local probes = { }
@@ -27,7 +27,7 @@
---
-- Introspection logic. This module implements Lua objects introspection and
--- generates a [DBGP](http://xdebug.org/docs-dbgp.php) compatible
+-- generates a [DBGP](http://xdebug.org/docs-dbgp.php) compatible
-- [LOM](http://matthewwild.co.uk/projects/luaexpat/lom.html) data scructure.
-- @module debugger.introspection
local M = { }
@@ -38,10 +38,10 @@
-- Modifying properties after their generation is possible (as actual data serialization/sending is delayed)
-- but should be used with care. The XML structure uses the [LOM](http://matthewwild.co.uk/projects/luaexpat/lom.html)
-- format, refer to these documents to get more informations about fields.
---
+--
-- In addition to table fields, it has an array part, `[1]` being the string representation (base64 encoded),
-- possibly followed by chlid properties (@{#DBGPProperty} themselves)
---
+--
-- @field #string tag Always "property"
-- @field #table attr XML attributes, see DBGP specification
-- @type DBGPProperty
@@ -81,7 +81,7 @@
-- @return #nil If the value has not been inspected
-- @function [parent=#debugger.introspection] inspect
M.inspect = function(name, value, parent, fullname)
- return (M.inspectors[type(value)] or M.inspectors.default)(name, value, parent, fullname)
+ return (M.inspectors[type(value)] or M.inspectors.default)(name, value, parent, fullname)
end
-- ----------------- --
@@ -89,55 +89,55 @@
-- ----------------- --
local function default_inspector(name, value, parent, fullname)
- return M.property(name, type(value), tostring(value), parent, fullname)
+ return M.property(name, type(value), tostring(value), parent, fullname)
end
-- Inspects types that can have a metatable (table and userdata). Returns
-- 1) generated property
-- 2) boolean indicating whether a custom inspector has been called (in that case, do not process value any further)
local function metatable_inspector(name, value, parent, fullname)
- local mt = getmetatable(value)
- do
- -- find by metatable
- local custom = M.inspectors[mt]
- if custom then return custom(name, value, parent, fullname), true end
- -- or else call probes
- for i=1, #probes do
- local prop = probes[i](name, value, parent, fullname)
- if prop then return prop, true end
- end
+ local mt = getmetatable(value)
+ do
+ -- find by metatable
+ local custom = M.inspectors[mt]
+ if custom then return custom(name, value, parent, fullname), true end
+ -- or else call probes
+ for i=1, #probes do
+ local prop = probes[i](name, value, parent, fullname)
+ if prop then return prop, true end
end
+ end
- local prop = default_inspector(name, value, parent, fullname)
- if mt and prop then
- local mtprop = M.inspect("metatable", mt, prop, "metatable["..prop.attr.fullname.."]")
- if mtprop then mtprop.attr.type = "special" end
- end
- return prop, false
+ local prop = default_inspector(name, value, parent, fullname)
+ if mt and prop then
+ local mtprop = M.inspect("metatable", mt, prop, "metatable["..prop.attr.fullname.."]")
+ if mtprop then mtprop.attr.type = "special" end
+ end
+ return prop, false
end
local function fancy_func_repr(f, info)
- local args = {}
- for i=1, info.nparams do
- args[i] = debug.getlocal(f, i)
- end
+ local args = {}
+ for i=1, info.nparams do
+ args[i] = debug.getlocal(f, i)
+ end
- if info.isvararg then
- args[#args+1] = "..."
- end
+ if info.isvararg then
+ args[#args+1] = "..."
+ end
- return "function(" .. tconcat(args, ", ") .. ")"
+ return "function(" .. tconcat(args, ", ") .. ")"
end
--- Generate a name siutable for table index syntax
--- @param name Key name
+-- @param name Key name
-- @return #string A table index style index
-- @usage generate_printable_key('foo') => '["foo"]'
-- @usage generate_printable_key(12) => '[12]'
-- @usage generate_printable_key({}) => '[table: 0x12345678]
-- @function [parent=#debugger.introspection] generate_printable_key
local function generate_printable_key(name)
- return "[" .. (type(name) == "string" and sformat("%q", name) or tostring(name)) .. "]"
+ return "[" .. (type(name) == "string" and sformat("%q", name) or tostring(name)) .. "]"
end
M.generate_printable_key = generate_printable_key
@@ -147,15 +147,15 @@
M.key_cache = setmetatable({ n=0 }, { __mode = "v" })
local function generate_key(name)
- local tname = type(name)
- if tname == "string" then return sformat("%q", name)
- elseif tname == "number" or tname == "boolean" then return tostring(name)
- else -- complex key, use key_cache for lookup
- local i = M.key_cache.n
- M.key_cache[i] = name
- M.key_cache.n = i+1
- return "key_cache["..tostring(i).."]"
- end
+ local tname = type(name)
+ if tname == "string" then return sformat("%q", name)
+ elseif tname == "number" or tname == "boolean" then return tostring(name)
+ else -- complex key, use key_cache for lookup
+ local i = M.key_cache.n
+ M.key_cache[i] = name
+ M.key_cache.n = i+1
+ return "key_cache["..tostring(i).."]"
+ end
end
--- Generate a usable fullname for a value.
@@ -168,7 +168,7 @@
-- @return #string A valid fullname expression
-- @function [parent=#debugger.introspection] make_fullname
local function make_fullname(parent, key)
- return parent .. "[" .. generate_key(key) .. "]"
+ return parent .. "[" .. generate_key(key) .. "]"
end
M.make_fullname = make_fullname
@@ -184,73 +184,73 @@
M.inspectors.default = default_inspector -- allows 3rd party inspectors to use the default inspector if needed
M.inspectors.userdata = function(name, value, parent, fullname)
- return (metatable_inspector(name, value, parent, fullname)) -- drop second return value
+ return (metatable_inspector(name, value, parent, fullname)) -- drop second return value
end
M.inspectors.string = function(name, value, parent, fullname)
- -- escape linebreaks as \n and not as \<0x0A> like %q does
- return M.property(name, "string", sformat("%q", value):gsub("\\\n", "\\n"), parent, fullname)
+ -- escape linebreaks as \n and not as \<0x0A> like %q does
+ return M.property(name, "string", sformat("%q", value):gsub("\\\n", "\\n"), parent, fullname)
end
M.inspectors["function"] = function(name, value, parent, fullname)
- local info = debug.getinfo(value, "nSflu")
- local prop
- if info.what ~= "C" then
- -- try to create a fancy representation if possible
- local repr = info.nparams and fancy_func_repr(value, info) or tostring(value)
- if info.source:sub(1,1) == "@" then
- repr = repr .. "\n" .. platform.get_uri("@" .. info.source) .. "\n" .. tostring(info.linedefined)
- end
- prop = M.property(name, "function (Lua)", repr, parent, fullname)
- else
- prop = M.property(name, "function", tostring(value), parent, fullname)
+ local info = debug.getinfo(value, "nSflu")
+ local prop
+ if info.what ~= "C" then
+ -- try to create a fancy representation if possible
+ local repr = info.nparams and fancy_func_repr(value, info) or tostring(value)
+ if info.source:sub(1,1) == "@" then
+ repr = repr .. "\n" .. platform.get_uri("@" .. info.source) .. "\n" .. tostring(info.linedefined)
end
- if not prop then return nil end
-
- -- (5.1 only) environment is dumped only if it is different from global environment
- -- TODO: this is not a correct behavior: environment should be dumped if is different from current stack level one
- local fenv = getfenv and getfenv(value)
- if fenv and fenv ~= getfenv(0) then
- local fenvprop = M.inspect("environment", fenv, prop, "environment["..prop.attr.fullname.."]")
- if fenvprop then fenvprop.attr.type = "special" end
- end
-
- return prop
+ prop = M.property(name, "function (Lua)", repr, parent, fullname)
+ else
+ prop = M.property(name, "function", tostring(value), parent, fullname)
+ end
+ if not prop then return nil end
+
+ -- (5.1 only) environment is dumped only if it is different from global environment
+ -- TODO: this is not a correct behavior: environment should be dumped if is different from current stack level one
+ local fenv = getfenv and getfenv(value)
+ if fenv and fenv ~= getfenv(0) then
+ local fenvprop = M.inspect("environment", fenv, prop, "environment["..prop.attr.fullname.."]")
+ if fenvprop then fenvprop.attr.type = "special" end
+ end
+
+ return prop
end
M.inspectors.table = function(name, value, parent, fullname)
- local prop, iscustom = metatable_inspector(name, value, parent, fullname)
- if not prop or iscustom then return prop end
-
- -- iterate over table values and detect arrays at the same time
- -- next is used to circumvent __pairs metamethod in 5.2
- local isarray, i = true, 1
- for k,v in next, value, nil do
- M.inspect(generate_printable_key(k), v, prop, make_fullname(fullname, k))
- -- array detection: keys should be accessible by 1..n keys
- isarray = isarray and rawget(value, i) ~= nil
- i = i + 1
- end
- -- empty tables are considered as tables
- if isarray and i > 1 then prop.attr.type = "sequence" end
-
- return prop
+ local prop, iscustom = metatable_inspector(name, value, parent, fullname)
+ if not prop or iscustom then return prop end
+
+ -- iterate over table values and detect arrays at the same time
+ -- next is used to circumvent __pairs metamethod in 5.2
+ local isarray, i = true, 1
+ for k,v in next, value, nil do
+ M.inspect(generate_printable_key(k), v, prop, make_fullname(fullname, k))
+ -- array detection: keys should be accessible by 1..n keys
+ isarray = isarray and rawget(value, i) ~= nil
+ i = i + 1
+ end
+ -- empty tables are considered as tables
+ if isarray and i > 1 then prop.attr.type = "sequence" end
+
+ return prop
end
M.inspectors[MULTIVAL_MT] = function(name, value, parent, fullname)
- if value.n == 1 then
- -- return directly the value as result
- return M.inspect(name, value[1], parent, fullname)
- else
- -- wrap values inside a multival container
- local prop = M.property(name, "multival", "", parent, fullname)
- if not prop then return nil end
- for i=1, value.n do
- M.inspect(generate_printable_key(i), value[i], prop, fullname .. "[" .. i .. "]")
- end
- return prop
+ if value.n == 1 then
+ -- return directly the value as result
+ return M.inspect(name, value[1], parent, fullname)
+ else
+ -- wrap values inside a multival container
+ local prop = M.property(name, "multival", "", parent, fullname)
+ if not prop then return nil end
+ for i=1, value.n do
+ M.inspect(generate_printable_key(i), value[i], prop, fullname .. "[" .. i .. "]")
end
+ return prop
+ end
end
-- ------------ --
@@ -261,7 +261,7 @@
-- value to inspect. The Multival instances can be passed to make_property as a single value, they will be
-- correctly reported to debugger
function M.Multival(...)
- return setmetatable({ n=select("#", ...), ... }, MULTIVAL_MT)
+ return setmetatable({ n=select("#", ...), ... }, MULTIVAL_MT)
end
--- Makes a property form a name/value pair (and fullname). This is an **internal** function, and should not be used by 3rd party inspectors.
@@ -278,63 +278,63 @@
-- @function [parent=#debugger.introspection] make_property
--TODO BUG ECLIPSE TOOLSLINUX-99 352316 : as a workaround, context is encoded into the fullname property
M.make_property = function(cxt_id, value, name, fullname, depth, pagesize, page, size_limit, safe_name)
- fullname = fullname or "(...)[" .. generate_key(name) .. "]"
- if not safe_name then name = generate_printable_key(name) end
+ fullname = fullname or "(...)[" .. generate_key(name) .. "]"
+ if not safe_name then name = generate_printable_key(name) end
- local generator = cocreate(function() return M.inspect(name, value, nil, fullname) end)
- local propstack = { }
- local rootnode
- local catchthis = true
- local nodestoskip = page * pagesize -- nodes to skip at root level to respect pagination
- local fullname_prefix = tostring(cxt_id).."|"
+ local generator = cocreate(function() return M.inspect(name, value, nil, fullname) end)
+ local propstack = { }
+ local rootnode
+ local catchthis = true
+ local nodestoskip = page * pagesize -- nodes to skip at root level to respect pagination
+ local fullname_prefix = tostring(cxt_id).."|"
- while true do
- local succes, name, datatype, repr, parent, fullname = assert(coresume(generator, catchthis and propstack[#propstack] or nil))
- -- finalize and pop all finished properties
- while propstack[#propstack] ~= parent do
- local topop = propstack[#propstack]
- topop.attr.fullname = util.rawb64(fullname_prefix .. topop.attr.fullname)
- propstack[#propstack] = nil
- end
- if costatus(generator) == "dead" then break end
-
- local prop = {
- tag = "property",
- attr = {
- children = 0,
- pagesize = pagesize,
- page = parent and 0 or page,
- type = datatype,
- name = name,
- fullname = fullname,
- encoding = "base64",
- size = #repr,
- },
- util.b64(size_limit and repr:sub(1, size_limit) or repr)
- }
-
- if parent then
- parent.attr.children = 1
- parent.attr.numchildren = (parent.attr.numchildren or 0) + 1
- -- take pagination into accont to know if node needs to be catched
- catchthis = #parent <= pagesize and #propstack <= depth
- if parent == rootnode then
- catchthis = catchthis and nodestoskip <= 0
- nodestoskip = nodestoskip - 1
- end
- -- add node to tree
- if catchthis then
- parent[#parent + 1] = prop
- propstack[#propstack + 1] = prop
- end
- else
- rootnode = prop
- catchthis = true
- propstack[#propstack + 1] = prop
- end
+ while true do
+ local succes, name, datatype, repr, parent, fullname = assert(coresume(generator, catchthis and propstack[#propstack] or nil))
+ -- finalize and pop all finished properties
+ while propstack[#propstack] ~= parent do
+ local topop = propstack[#propstack]
+ topop.attr.fullname = util.rawb64(fullname_prefix .. topop.attr.fullname)
+ propstack[#propstack] = nil
end
+ if costatus(generator) == "dead" then break end
- return rootnode
+ local prop = {
+ tag = "property",
+ attr = {
+ children = 0,
+ pagesize = pagesize,
+ page = parent and 0 or page,
+ type = datatype,
+ name = name,
+ fullname = fullname,
+ encoding = "base64",
+ size = #repr,
+ },
+ util.b64(size_limit and repr:sub(1, size_limit) or repr)
+ }
+
+ if parent then
+ parent.attr.children = 1
+ parent.attr.numchildren = (parent.attr.numchildren or 0) + 1
+      -- take pagination into account to know if node needs to be caught
+ catchthis = #parent <= pagesize and #propstack <= depth
+ if parent == rootnode then
+ catchthis = catchthis and nodestoskip <= 0
+ nodestoskip = nodestoskip - 1
+ end
+ -- add node to tree
+ if catchthis then
+ parent[#parent + 1] = prop
+ propstack[#propstack + 1] = prop
+ end
+ else
+ rootnode = prop
+ catchthis = true
+ propstack[#propstack + 1] = prop
+ end
+ end
+
+ return rootnode
end
return M
diff --git a/libraries/luadbgpclient/debugger/platform.lua b/libraries/luadbgpclient/debugger/platform.lua
index 2a25826..b7a55ac 100644
--- a/libraries/luadbgpclient/debugger/platform.lua
+++ b/libraries/luadbgpclient/debugger/platform.lua
@@ -35,172 +35,172 @@
--- Returns a RFC2396 compliant URI for given source, or false if the mapping failed
local function get_abs_file_uri (source)
- local uri
- if source:sub(1,1) == "@" then -- real source file
- local sourcepath = source:sub(2)
- local normalizedpath = M.normalize(sourcepath)
- if not M.is_path_absolute(normalizedpath) then
- normalizedpath = M.normalize(M.base_dir .. "/" .. normalizedpath)
- end
- return M.to_file_uri(normalizedpath)
- else -- dynamic code, stripped bytecode, tail return, ...
- return false
+ local uri
+ if source:sub(1,1) == "@" then -- real source file
+ local sourcepath = source:sub(2)
+ local normalizedpath = M.normalize(sourcepath)
+ if not M.is_path_absolute(normalizedpath) then
+ normalizedpath = M.normalize(M.base_dir .. "/" .. normalizedpath)
end
+ return M.to_file_uri(normalizedpath)
+ else -- dynamic code, stripped bytecode, tail return, ...
+ return false
+ end
end
--FIXME: as result is cached, changes in package.path that modify the module name are missed
-- (mostly affect main module when Lua interpreter is launched with an absolute path)
local function get_module_uri (source)
- if source:sub(1,1) == "@" then -- real source file
- local uri
- local sourcepath = source:sub(2)
- local normalizedpath = M.normalize(sourcepath)
- local luapathtable = split (package.path, ";")
- local is_source_absolute = M.is_path_absolute(sourcepath)
- -- workarround : Add always the ?.lua entry to support
- -- the case where file was loaded by : "lua myfile.lua"
- table.insert(luapathtable,"?.lua")
- for i,var in ipairs(luapathtable) do
- -- avoid relative patterns matching absolute ones (e.g. ?.lua matches anything)
- if M.is_path_absolute(var) == is_source_absolute then
- local escaped = string.gsub(M.normalize(var),"[%^%$%(%)%%%.%[%]%*%+%-%?]",function(c) return "%"..c end)
- local pattern = string.gsub(escaped,"%%%?","(.+)")
- local modulename = string.match(normalizedpath,pattern)
- if modulename then
- modulename = string.gsub(modulename,"/",".");
- -- if we find more than 1 possible modulename return the shorter
- if not uri or string.len(uri)>string.len(modulename) then
- uri = modulename
- end
- end
- end
+ if source:sub(1,1) == "@" then -- real source file
+ local uri
+ local sourcepath = source:sub(2)
+ local normalizedpath = M.normalize(sourcepath)
+ local luapathtable = split (package.path, ";")
+ local is_source_absolute = M.is_path_absolute(sourcepath)
+    -- workaround: always add the ?.lua entry to support
+ -- the case where file was loaded by : "lua myfile.lua"
+ table.insert(luapathtable,"?.lua")
+ for i,var in ipairs(luapathtable) do
+ -- avoid relative patterns matching absolute ones (e.g. ?.lua matches anything)
+ if M.is_path_absolute(var) == is_source_absolute then
+ local escaped = string.gsub(M.normalize(var),"[%^%$%(%)%%%.%[%]%*%+%-%?]",function(c) return "%"..c end)
+ local pattern = string.gsub(escaped,"%%%?","(.+)")
+ local modulename = string.match(normalizedpath,pattern)
+ if modulename then
+ modulename = string.gsub(modulename,"/",".");
+ -- if we find more than 1 possible modulename return the shorter
+ if not uri or string.len(uri)>string.len(modulename) then
+ uri = modulename
+ end
end
- if uri then return "module:///"..uri end
+ end
end
- return false
+ if uri then return "module:///"..uri end
+ end
+ return false
end
function M.get_uri (source)
- -- search in cache
- local uri = uri_cache[source]
- if uri ~= nil then return uri end
+ -- search in cache
+ local uri = uri_cache[source]
+ if uri ~= nil then return uri end
- -- not found, create uri
- if util.features.uri == "module" then
- uri = get_module_uri(source)
- if not uri then uri = get_abs_file_uri (source) end
- else
- uri = get_abs_file_uri (source)
- end
+ -- not found, create uri
+ if util.features.uri == "module" then
+ uri = get_module_uri(source)
+ if not uri then uri = get_abs_file_uri (source) end
+ else
+ uri = get_abs_file_uri (source)
+ end
- uri_cache[source] = uri
- return uri
+ uri_cache[source] = uri
+ return uri
end
-- get path file from uri
function M.get_path (uri)
- local parsed_path = assert(url.parse(uri))
- if parsed_path.scheme == "file" then
- return M.to_path(parsed_path)
- else
- -- search in cache
- -- we should surely calculate it instead of find in cache
- for k,v in pairs(uri_cache)do
- if v == uri then
- assert(k:sub(1,1) == "@")
- return k:sub(2)
- end
- end
+ local parsed_path = assert(url.parse(uri))
+ if parsed_path.scheme == "file" then
+ return M.to_path(parsed_path)
+ else
+ -- search in cache
+ -- we should surely calculate it instead of find in cache
+ for k,v in pairs(uri_cache)do
+ if v == uri then
+ assert(k:sub(1,1) == "@")
+ return k:sub(2)
+ end
end
+ end
end
function M.normalize(path)
- local parts = { }
- for w in path:gmatch("[^/]+") do
- if w == ".." and #parts ~=0 then table.remove(parts)
- elseif w ~= "." then table.insert(parts, w)
- end
+ local parts = { }
+ for w in path:gmatch("[^/]+") do
+ if w == ".." and #parts ~=0 then table.remove(parts)
+ elseif w ~= "." then table.insert(parts, w)
end
- return (path:sub(1,1) == "/" and "/" or "") .. table.concat(parts, "/")
+ end
+ return (path:sub(1,1) == "/" and "/" or "") .. table.concat(parts, "/")
end
function M.init(executionplatform,workingdirectory)
- --------------------------
- -- define current platform
- --------------------------
- -- check parameter
- if executionplatform and executionplatform ~= "unix" and executionplatform ~="win" then
- error("Unable to initialize platform module : execution platform should be 'unix' or 'win'.")
+ --------------------------
+ -- define current platform
+ --------------------------
+ -- check parameter
+ if executionplatform and executionplatform ~= "unix" and executionplatform ~="win" then
+ error("Unable to initialize platform module : execution platform should be 'unix' or 'win'.")
+ end
+
+ -- use parameter as current platform
+ if executionplatform then
+ platform = executionplatform
+ else
+    -- if not defined, try to guess it.
+ local function iswindows()
+ local p = io.popen("echo %os%")
+ if p then
+ local result =p:read("*l")
+ p:close()
+ return result == "Windows_NT"
+ end
+ return false
end
- -- use parameter as current platform
- if executionplatform then
- platform = executionplatform
+ local status, iswin = pcall(iswindows)
+ if status and iswin then
+ platform = "win"
else
- --if not define try to guess it.
- local function iswindows()
- local p = io.popen("echo %os%")
- if p then
- local result =p:read("*l")
- p:close()
- return result == "Windows_NT"
- end
- return false
- end
-
- local status, iswin = pcall(iswindows)
- if status and iswin then
- platform = "win"
- else
- platform = "unix"
- end
+ platform = "unix"
end
+ end
- --------------------------
- -- platform dependent function
- --------------------------
- if platform == "unix" then
- -- The Path separator character
- M.path_sep = "/"
+ --------------------------
+ -- platform dependent function
+ --------------------------
+ if platform == "unix" then
+ -- The Path separator character
+ M.path_sep = "/"
- -- TODO the way to get the absolute path can be wrong if the program loads new source files by relative path after a cd.
- -- currently, the directory is registered on start, this allows program to load any source file and then change working dir,
- -- which is the most common use case.
- M.base_dir = workingdirectory or os.getenv("PWD")
+ -- TODO the way to get the absolute path can be wrong if the program loads new source files by relative path after a cd.
+ -- currently, the directory is registered on start, this allows program to load any source file and then change working dir,
+ -- which is the most common use case.
+ M.base_dir = workingdirectory or os.getenv("PWD")
- -- convert parsed URL table to file path for the current OS (see url.parse from luasocket)
- M.to_file_uri = function (path) return url.build{scheme="file",authority="", path=path} end
+ -- convert parsed URL table to file path for the current OS (see url.parse from luasocket)
+ M.to_file_uri = function (path) return url.build{scheme="file",authority="", path=path} end
- -- return true is the path is absolute
- -- the path must be normalized
- M.is_path_absolute = function (path) return path:sub(1,1) == "/" end
+  -- return true if the path is absolute
+ -- the path must be normalized
+ M.is_path_absolute = function (path) return path:sub(1,1) == "/" end
- -- convert absolute normalized path file to uri
- M.to_path = function (parsed_url) return url.unescape(parsed_url.path) end
- else
- -- Implementations for Windows, see UNIX versions for documentation.
- M.path_sep = "\\"
- M.is_path_absolute = function (path) return path:match("^%a:/") end
- M.to_file_uri = function (path) return url.build{scheme="file",authority="", path="/"..path} end
- M.to_path = function (parsed_url) return url.unescape(parsed_url.path):gsub("^/", "") end
+ -- convert absolute normalized path file to uri
+ M.to_path = function (parsed_url) return url.unescape(parsed_url.path) end
+ else
+ -- Implementations for Windows, see UNIX versions for documentation.
+ M.path_sep = "\\"
+ M.is_path_absolute = function (path) return path:match("^%a:/") end
+ M.to_file_uri = function (path) return url.build{scheme="file",authority="", path="/"..path} end
+ M.to_path = function (parsed_url) return url.unescape(parsed_url.path):gsub("^/", "") end
- local unixnormalize = M.normalize
- M.normalize = function(path) return unixnormalize(path:gsub("\\","/"):lower()) end
+ local unixnormalize = M.normalize
+ M.normalize = function(path) return unixnormalize(path:gsub("\\","/"):lower()) end
- -- determine base dir
- local function getworkingdirectory()
- local p = io.popen("echo %cd%")
- if p then
- local res = p:read("*l")
- p:close()
- return M.normalize(res)
- end
- end
- M.base_dir = workingdirectory or getworkingdirectory()
-
+ -- determine base dir
+ local function getworkingdirectory()
+ local p = io.popen("echo %cd%")
+ if p then
+ local res = p:read("*l")
+ p:close()
+ return M.normalize(res)
+ end
end
+ M.base_dir = workingdirectory or getworkingdirectory()
- if not M.base_dir then error("Unable to determine the working directory.") end
+ end
+
+ if not M.base_dir then error("Unable to determine the working directory.") end
end
return M
diff --git a/libraries/luadbgpclient/debugger/plugins/ffi/init.lua b/libraries/luadbgpclient/debugger/plugins/ffi/init.lua
index e72660b..6e6479e 100644
--- a/libraries/luadbgpclient/debugger/plugins/ffi/init.lua
+++ b/libraries/luadbgpclient/debugger/plugins/ffi/init.lua
@@ -35,36 +35,36 @@
M.inspect_references = true
local function make_typename(refct)
- local t = refct.what
- if t == "int" then
- if refct.bool then t = "bool"
- else
- -- use C99 type notation to give more details about acutal type
- t = (refct.unsigned and "uint" or "int") .. tostring(refct.size * 8) .. "_t"
- end
- elseif t == "float" then
- -- assume IEEE754
- if refct.size == 8 then t = "double"
- elseif refct.size == 16 then t = "long double" -- not really sure this one is always true
- end
- elseif t == "struct" or t == "enum" or t == "union" then
- t = refct.name and (t .. " " .. refct.name) or ("anonymous "..t)
- elseif t == "func" then
- t = "function (FFI)"
- elseif t == "ptr" then
- t = make_typename(refct.element_type) .. "*"
- elseif t == "ref" then
- t = make_typename(refct.element_type) .. "&"
- elseif t == "field" then
- return make_typename(refct.type)
- elseif t == "bitfield" then
- t = (refct.type.unsigned and "unsigned" or "signed") .. ":" .. tostring(refct.size * 8)
- refct = refct.type
+ local t = refct.what
+ if t == "int" then
+ if refct.bool then t = "bool"
+ else
+      -- use C99 type notation to give more details about the actual type
+ t = (refct.unsigned and "uint" or "int") .. tostring(refct.size * 8) .. "_t"
end
-
- if refct.const then t = "const " .. t end
- if refct.volatile then t = "volatile " .. t end
- return t
+ elseif t == "float" then
+ -- assume IEEE754
+ if refct.size == 8 then t = "double"
+ elseif refct.size == 16 then t = "long double" -- not really sure this one is always true
+ end
+ elseif t == "struct" or t == "enum" or t == "union" then
+ t = refct.name and (t .. " " .. refct.name) or ("anonymous "..t)
+ elseif t == "func" then
+ t = "function (FFI)"
+ elseif t == "ptr" then
+ t = make_typename(refct.element_type) .. "*"
+ elseif t == "ref" then
+ t = make_typename(refct.element_type) .. "&"
+ elseif t == "field" then
+ return make_typename(refct.type)
+ elseif t == "bitfield" then
+ t = (refct.type.unsigned and "unsigned" or "signed") .. ":" .. tostring(refct.size * 8)
+ refct = refct.type
+ end
+
+ if refct.const then t = "const " .. t end
+ if refct.volatile then t = "volatile " .. t end
+ return t
end
-- if cdatakind is unknown, this one will be called
@@ -75,121 +75,121 @@
-- we may create boxed references or Lua native objects which will be inspected as such
-- (leading to wrong type names).
local function recurse(name, value, parent, fullname, refct)
- if type(value) == "cdata" then
- return inspect(name, value, parent, fullname, refct)
- else
- local prop = introspection.inspect(name, value, parent, fullname)
- if prop then
- prop.attr.type = make_typename(refct)
- end
- return prop
+ if type(value) == "cdata" then
+ return inspect(name, value, parent, fullname, refct)
+ else
+ local prop = introspection.inspect(name, value, parent, fullname)
+ if prop then
+ prop.attr.type = make_typename(refct)
end
+ return prop
+ end
end
-- cdata specific inspectors
local inspectors = {
- struct = function(name, value, parent, fullname, refct)
- local prop = introspection.property(name, make_typename(refct), tostring(value), parent, fullname)
+ struct = function(name, value, parent, fullname, refct)
+ local prop = introspection.property(name, make_typename(refct), tostring(value), parent, fullname)
- -- inspect children, if needed
- if prop then
- for member in refct:members() do
- local mname = member.name
- recurse(mname, value[mname], prop, fullname .. sformat('[%q]', mname), member)
- end
- end
- return prop
- end,
+ -- inspect children, if needed
+ if prop then
+ for member in refct:members() do
+ local mname = member.name
+ recurse(mname, value[mname], prop, fullname .. sformat('[%q]', mname), member)
+ end
+ end
+ return prop
+ end,
- array = function(name, value, parent, fullname, refct)
- local etype = refct.element_type
- -- for VLAs, reflect does not give size
- local size = refct.size ~= "none" and refct.size or ffi.sizeof(value)
- size = size and (size / etype.size) -- we've got the byte size, not element count
-
- local typename = make_typename(etype)
- local prop = introspection.property(name, typename .. "[" .. (tostring(size) or "") .. "]", tostring(value), parent, fullname)
-
- if prop and size then
- for i=0, size-1 do
- local idx = "["..tostring(i).."]"
- recurse(idx, value[i], prop, fullname .. idx, etype)
- end
- end
- return prop
- end,
+ array = function(name, value, parent, fullname, refct)
+ local etype = refct.element_type
+ -- for VLAs, reflect does not give size
+ local size = refct.size ~= "none" and refct.size or ffi.sizeof(value)
+ size = size and (size / etype.size) -- we've got the byte size, not element count
- func = function(name, value, parent, fullname, refct)
- local args = { }
- for arg in refct:arguments() do
- args[#args + 1] = make_typename(arg.type) .. " " .. arg.name
- end
-
- if refct.vararg then
- args[#args + 1] = "..."
- end
-
- local repr = make_typename(refct.return_type) .. " " .. refct.name .. "(" .. tconcat(args, ", ") .. ")"
- return introspection.property(name, make_typename(refct), repr, parent, fullname)
- end,
+ local typename = make_typename(etype)
+ local prop = introspection.property(name, typename .. "[" .. (tostring(size) or "") .. "]", tostring(value), parent, fullname)
- enum = function(name, value, parent, fullname, refct)
- local repr = tonumber(value)
- -- try to convert numeric value into enum name
- --TODO: is there a faster method to make it ?
- for val in refct:values() do
- if val.value == repr then
- repr = val.name
- break
- end
- end
-
- return introspection.property(name, make_typename(refct), tostring(repr), parent, fullname)
- end,
-
- ref = function(name, value, parent, fullname, refct)
- -- this may be unsafe, see inspect_references setting
- local typename = make_typename(refct)
- if not M.inspect_references then
- return introspection.property(name, typename, tostring(value), parent, fullname)
- end
-
- local prop = recurse(name, value, parent, fullname, refct.element_type)
- if prop then
- prop.attr.type = typename
- end
- return prop
- end,
-
- int = function(name, value, parent, fullname, refct)
- return introspection.property(name, make_typename(refct), tostring(tonumber(value)), parent, fullname)
- end,
-
- -- pointers are too unsafe, do not inspect them
- ptr = function(name, value, parent, fullname, refct)
- return introspection.property(name, make_typename(refct), tostring(value), parent, fullname)
- end,
+ if prop and size then
+ for i=0, size-1 do
+ local idx = "["..tostring(i).."]"
+ recurse(idx, value[i], prop, fullname .. idx, etype)
+ end
+ end
+ return prop
+ end,
+
+ func = function(name, value, parent, fullname, refct)
+ local args = { }
+ for arg in refct:arguments() do
+ args[#args + 1] = make_typename(arg.type) .. " " .. arg.name
+ end
+
+ if refct.vararg then
+ args[#args + 1] = "..."
+ end
+
+ local repr = make_typename(refct.return_type) .. " " .. refct.name .. "(" .. tconcat(args, ", ") .. ")"
+ return introspection.property(name, make_typename(refct), repr, parent, fullname)
+ end,
+
+ enum = function(name, value, parent, fullname, refct)
+ local repr = tonumber(value)
+ -- try to convert numeric value into enum name
+      --TODO: is there a faster way to do this?
+ for val in refct:values() do
+ if val.value == repr then
+ repr = val.name
+ break
+ end
+ end
+
+ return introspection.property(name, make_typename(refct), tostring(repr), parent, fullname)
+ end,
+
+ ref = function(name, value, parent, fullname, refct)
+ -- this may be unsafe, see inspect_references setting
+ local typename = make_typename(refct)
+ if not M.inspect_references then
+ return introspection.property(name, typename, tostring(value), parent, fullname)
+ end
+
+ local prop = recurse(name, value, parent, fullname, refct.element_type)
+ if prop then
+ prop.attr.type = typename
+ end
+ return prop
+ end,
+
+ int = function(name, value, parent, fullname, refct)
+ return introspection.property(name, make_typename(refct), tostring(tonumber(value)), parent, fullname)
+ end,
+
+ -- pointers are too unsafe, do not inspect them
+ ptr = function(name, value, parent, fullname, refct)
+ return introspection.property(name, make_typename(refct), tostring(value), parent, fullname)
+ end,
}
inspectors.union = inspectors.struct
inspectors.float = inspectors.int
--- for struct/union fields, the actual type is nested into the refct
+-- for struct/union fields, the actual type is nested into the refct
inspectors.field = function(name, value, parent, fullname, refct)
- return inspect(name, value, parent, fullname, refct.type)
+ return inspect(name, value, parent, fullname, refct.type)
end
inspectors.bitfield = inspectors.field
inspect = function(name, value, parent, fullname, refct)
- -- inspect only values, not ctypes
- --FIXME: this cause references to be dereferenced and crash the process if they are wrong !
- if ffi.typeof(value) ~= value then
- refct = refct or reflect.typeof(value)
- return (inspectors[refct.what] or default_inspector)(name, value, parent, fullname, refct)
- end
-
- -- return a simple property for ctypes
- return introspection.property(name, "ctype", tostring(value), parent, fullname)
+ -- inspect only values, not ctypes
+ --FIXME: this cause references to be dereferenced and crash the process if they are wrong !
+ if ffi.typeof(value) ~= value then
+ refct = refct or reflect.typeof(value)
+ return (inspectors[refct.what] or default_inspector)(name, value, parent, fullname, refct)
+ end
+
+ -- return a simple property for ctypes
+ return introspection.property(name, "ctype", tostring(value), parent, fullname)
end
introspection.inspectors.cdata = inspect
diff --git a/libraries/luadbgpclient/debugger/plugins/ffi/reflect.lua b/libraries/luadbgpclient/debugger/plugins/ffi/reflect.lua
index 56cf1f1..f65209d 100644
--- a/libraries/luadbgpclient/debugger/plugins/ffi/reflect.lua
+++ b/libraries/luadbgpclient/debugger/plugins/ffi/reflect.lua
@@ -48,7 +48,7 @@
if gcref ~= 0 then
local ts = ffi.cast("uint32_t*", gcref)
return ffi.string(ts + 4, ts[3])
- end
+end
end
local function memptr(gcobj)
@@ -170,7 +170,7 @@
-- Create a metatable for each CT.
local metatables = {
-}
+ }
for _, CT in ipairs(CTs) do
local what = CT[1]
local mt = {__index = {}}
@@ -195,7 +195,7 @@
}
-- C function calling conventions (CTCC_* constants in lj_refct.h)
-local CTCCs = {[0] =
+local CTCCs = {[0] =
"cdecl",
"thiscall",
"fastcall",
@@ -212,7 +212,7 @@
typeid = id,
name = gc_str(ctype.name),
}, metatables[what])
-
+
-- Interpret (most of) the CType::info field
for i = 5, #CT do
if bit.band(ctype.info, CT[i][1]) ~= 0 then
@@ -228,7 +228,7 @@
elseif what == "func" then
refct.convention = CTCCs[bit.band(bit.rshift(ctype.info, 16), 3)]
end
-
+
if CT[2] ~= "" then -- Interpret the CType::cid field
local k = CT[2]
local cid = bit.band(ctype.info, 0xffff)
@@ -241,7 +241,7 @@
end
refct[k] = cid
end
-
+
if CT[3] ~= "" then -- Interpret the CType::size field
local k = CT[3]
refct[k] = ctype.size
@@ -249,7 +249,7 @@
refct[k] = "none"
end
end
-
+
if what == "attrib" then
-- Merge leading attributes onto the type being decorated.
local CTA = CTAs[bit.band(bit.rshift(ctype.info, 16), 0xff)]
@@ -276,7 +276,7 @@
}
refct.bool, refct.const, refct.volatile, refct.unsigned = nil
end
-
+
if CT[4] then -- Merge sibling attributes onto this type.
while ctype.sib ~= 0 do
local entry = CTState.tab[ctype.sib]
@@ -285,9 +285,9 @@
local sib = refct_from_id(ctype.sib)
sib:CTA(refct)
ctype = entry
- end
end
-
+ end
+
return refct
end
@@ -343,4 +343,4 @@
return miscmap[-tonumber(ffi.typeof(x))]
end
-return reflect
\ No newline at end of file
+return reflect
diff --git a/libraries/luadbgpclient/debugger/transport/apr.lua b/libraries/luadbgpclient/debugger/transport/apr.lua
index 5370245..3f62bfa 100644
--- a/libraries/luadbgpclient/debugger/transport/apr.lua
+++ b/libraries/luadbgpclient/debugger/transport/apr.lua
@@ -36,31 +36,31 @@
SOCKET_MT.__index = SOCKET_MT
return {
- create = function()
- local skt, err = apr.socket_create('tcp')
- if not skt then return nil, err end
- return setmetatable({skt = skt}, SOCKET_MT)
- end,
- sleep = apr.sleep, -- exact same API as LuaSocket
-
- -- Base64 related functions
- --- Encodes a string into Base64 with line wrapping
- -- @param data (string) data to encode
- -- @return base64 encoded string
- b64 = function(data)
- t = {}
- local b64_data = apr.base64_encode(data)
- for i=1, #b64_data, 76 do t[#t+1] = b64_data:sub(i, i+75).."\r\n" end
- return table.concat(t)
- end,
+ create = function()
+ local skt, err = apr.socket_create('tcp')
+ if not skt then return nil, err end
+ return setmetatable({skt = skt}, SOCKET_MT)
+ end,
+ sleep = apr.sleep, -- exact same API as LuaSocket
- --- Encodes a string into Base64, without any extra parsing (wrapping, ...)
- -- @param data (string) data to encode
- -- @return decoded string
- rawb64 = apr.base64_encode,
+ -- Base64 related functions
+ --- Encodes a string into Base64 with line wrapping
+ -- @param data (string) data to encode
+ -- @return base64 encoded string
+ b64 = function(data)
+ t = {}
+ local b64_data = apr.base64_encode(data)
+ for i=1, #b64_data, 76 do t[#t+1] = b64_data:sub(i, i+75).."\r\n" end
+ return table.concat(t)
+ end,
- --- Decodes base64 data
- -- @param data (string) base64 encoded data
- -- @return decoded string
- unb64 = apr.base64_decode,
+ --- Encodes a string into Base64, without any extra parsing (wrapping, ...)
+ -- @param data (string) data to encode
+ -- @return decoded string
+ rawb64 = apr.base64_encode,
+
+ --- Decodes base64 data
+ -- @param data (string) base64 encoded data
+ -- @return decoded string
+ unb64 = apr.base64_decode,
}
diff --git a/libraries/luadbgpclient/debugger/transport/luasocket.lua b/libraries/luadbgpclient/debugger/transport/luasocket.lua
index 61c9094..cd589fa 100644
--- a/libraries/luadbgpclient/debugger/transport/luasocket.lua
+++ b/libraries/luadbgpclient/debugger/transport/luasocket.lua
@@ -11,7 +11,7 @@
-- LuaSocket backend for DBGP debugger.
-------------------------------------------------------------------------------
--- in order to be as lightweight as possible with Luasocket, core API is used
+-- in order to be as lightweight as possible with Luasocket, core API is used
-- directly (to no add yet another layer)
--FIXME: remove this hack as soon as luasocket officially support 5.2
@@ -26,31 +26,31 @@
return {
- create = socket.tcp,
- sleep = socket.sleep,
-
- -- Base64 related functions
- --- Encodes a string into Base64 with line wrapping
- -- @param data (string) data to encode
- -- @return base64 encoded string
- b64 = function(data)
- local filter = ltn12.filter.chain(mime.encode("base64"), mime.wrap("base64"))
- local sink, output = ltn12.sink.table()
- ltn12.pump.all(ltn12.source.string(data), ltn12.sink.chain(filter, sink))
- return table.concat(output)
- end,
+ create = socket.tcp,
+ sleep = socket.sleep,
- --- Encodes a string into Base64, without any extra parsing (wrapping, ...)
- -- @param data (string) data to encode
- -- @return decoded string
- rawb64 = function(data)
- return (mime.b64(data)) -- first result of the low-level function is fine here
- end,
+ -- Base64 related functions
+ --- Encodes a string into Base64 with line wrapping
+ -- @param data (string) data to encode
+ -- @return base64 encoded string
+ b64 = function(data)
+ local filter = ltn12.filter.chain(mime.encode("base64"), mime.wrap("base64"))
+ local sink, output = ltn12.sink.table()
+ ltn12.pump.all(ltn12.source.string(data), ltn12.sink.chain(filter, sink))
+ return table.concat(output)
+ end,
- --- Decodes base64 data
- -- @param data (string) base64 encoded data
- -- @return decoded string
- unb64 = function(data)
- return (mime.unb64(data)) -- first result of the low-level function is fine here
- end,
+ --- Encodes a string into Base64, without any extra parsing (wrapping, ...)
+ -- @param data (string) data to encode
+ -- @return decoded string
+ rawb64 = function(data)
+ return (mime.b64(data)) -- first result of the low-level function is fine here
+ end,
+
+ --- Decodes base64 data
+ -- @param data (string) base64 encoded data
+ -- @return decoded string
+ unb64 = function(data)
+ return (mime.unb64(data)) -- first result of the low-level function is fine here
+ end,
}
diff --git a/libraries/luadbgpclient/debugger/transport/luasocket_sched.lua b/libraries/luadbgpclient/debugger/transport/luasocket_sched.lua
index 8c452c3..bba813a 100644
--- a/libraries/luadbgpclient/debugger/transport/luasocket_sched.lua
+++ b/libraries/luadbgpclient/debugger/transport/luasocket_sched.lua
@@ -11,7 +11,7 @@
-- LuaSocket with LuaSched backend for DBGP debugger.
-------------------------------------------------------------------------------
--- As LuaShed totally hides blocking functions, this module MUST be loaded on the very start of the program
+-- As LuaSched totally hides blocking functions, this module MUST be loaded at the very start of the program
-- (before loading sched) to catch references to blocking functions.
local socketcore = require"socket.core"
@@ -48,31 +48,31 @@
package.loaded.socket = nil
return {
- create = function() return setmetatable({ skt = blockingcreate() }, blockingtcp) end,
- sleep = blockingsleep,
-
- -- Base64 related functions
- --- Encodes a string into Base64 with line wrapping
- -- @param data (string) data to encode
- -- @return base64 encoded string
- b64 = function(data)
- local filter = ltn12.filter.chain(mime.encode("base64"), mime.wrap("base64"))
- local sink, output = ltn12.sink.table()
- ltn12.pump.all(ltn12.source.string(data), ltn12.sink.chain(filter, sink))
- return table.concat(output)
- end,
+ create = function() return setmetatable({ skt = blockingcreate() }, blockingtcp) end,
+ sleep = blockingsleep,
- --- Encodes a string into Base64, without any extra parsing (wrapping, ...)
- -- @param data (string) data to encode
- -- @return decoded string
- rawb64 = function(data)
- return (mime.b64(data)) -- first result of the low-level function is fine here
- end,
+ -- Base64 related functions
+ --- Encodes a string into Base64 with line wrapping
+ -- @param data (string) data to encode
+ -- @return base64 encoded string
+ b64 = function(data)
+ local filter = ltn12.filter.chain(mime.encode("base64"), mime.wrap("base64"))
+ local sink, output = ltn12.sink.table()
+ ltn12.pump.all(ltn12.source.string(data), ltn12.sink.chain(filter, sink))
+ return table.concat(output)
+ end,
- --- Decodes base64 data
- -- @param data (string) base64 encoded data
- -- @return decoded string
- unb64 = function(data)
- return (mime.unb64(data)) -- first result of the low-level function is fine here
- end,
+ --- Encodes a string into Base64, without any extra parsing (wrapping, ...)
+ -- @param data (string) data to encode
+ -- @return base64 encoded string
+ rawb64 = function(data)
+ return (mime.b64(data)) -- first result of the low-level function is fine here
+ end,
+
+ --- Decodes base64 data
+ -- @param data (string) base64 encoded data
+ -- @return decoded string
+ unb64 = function(data)
+ return (mime.unb64(data)) -- first result of the low-level function is fine here
+ end,
}
diff --git a/libraries/luadbgpclient/debugger/util.lua b/libraries/luadbgpclient/debugger/util.lua
index 222ad7a..09d3056 100644
--- a/libraries/luadbgpclient/debugger/util.lua
+++ b/libraries/luadbgpclient/debugger/util.lua
@@ -20,48 +20,48 @@
-- Debugger features handling. Any feature can be get like any regular table, setting features result in
-- error for unknown or read-only features.
M.features = setmetatable({ }, {
- -- functions that format/validate data. If function is not provided, the feature cannot be modified.
- validators = {
- multiple_sessions = tonumber,
- encoding = tostring,
- max_children = tonumber,
- max_data = tonumber,
- max_depth = tonumber,
- show_hidden = tonumber,
- uri = tostring,
- log_level = function(level_name)
- -- set numerical index in internal var
- LOG_LEVEL = assert(LEVELS[level_name], "No such level")
- return level_name -- the displayed level is still the name
- end,
- },
- __index = {
- multiple_sessions = 0,
- encoding ="UTF-8",
- max_children = 32,
- max_data = 0xFFFF,
- max_depth = 1,
- show_hidden = 1,
- uri = "file",
- log_level = "WARNING",
- -- read only features
- language_supports_threads = 0,
- language_name = "Lua",
- language_version = _VERSION,
- protocol_version = 1,
- supports_async = 1,
- data_encoding = "base64",
- breakpoint_languages = "Lua",
- breakpoint_types = "line conditional",
- },
- __newindex = function(self, k, v)
- local mt = getmetatable(self)
- local values, validator = mt.__index, mt.validators[k]
- if values[k] == nil then error("No such feature " .. tostring(k)) end
- if not validator then error("The feature " .. tostring(k) .. " is read-only") end
- v = assert(validator(v))
- values[k] = v
+ -- functions that format/validate data. If function is not provided, the feature cannot be modified.
+ validators = {
+ multiple_sessions = tonumber,
+ encoding = tostring,
+ max_children = tonumber,
+ max_data = tonumber,
+ max_depth = tonumber,
+ show_hidden = tonumber,
+ uri = tostring,
+ log_level = function(level_name)
+ -- set numerical index in internal var
+ LOG_LEVEL = assert(LEVELS[level_name], "No such level")
+ return level_name -- the displayed level is still the name
end,
+ },
+ __index = {
+ multiple_sessions = 0,
+ encoding ="UTF-8",
+ max_children = 32,
+ max_data = 0xFFFF,
+ max_depth = 1,
+ show_hidden = 1,
+ uri = "file",
+ log_level = "WARNING",
+ -- read only features
+ language_supports_threads = 0,
+ language_name = "Lua",
+ language_version = _VERSION,
+ protocol_version = 1,
+ supports_async = 1,
+ data_encoding = "base64",
+ breakpoint_languages = "Lua",
+ breakpoint_types = "line conditional",
+ },
+ __newindex = function(self, k, v)
+ local mt = getmetatable(self)
+ local values, validator = mt.__index, mt.validators[k]
+ if values[k] == nil then error("No such feature " .. tostring(k)) end
+ if not validator then error("The feature " .. tostring(k) .. " is read-only") end
+ v = assert(validator(v))
+ values[k] = v
+ end,
})
-- Wraps debug function and an attached thread
@@ -70,9 +70,9 @@
-- Foreign thread is used to debug paused thread
local ForeignThreadMT = {
- getinfo = function(self, level, what) return getinfo(self[1], level, what) end,
- getlocal = function(self, level, idx) return getlocal(self[1], level, idx) end,
- setlocal = function(self, level, idx, val) return setlocal(self[1], level, idx, val) end,
+ getinfo = function(self, level, what) return getinfo(self[1], level, what) end,
+ getlocal = function(self, level, idx) return getlocal(self[1], level, idx) end,
+ setlocal = function(self, level, idx, val) return setlocal(self[1], level, idx, val) end,
}
ForeignThreadMT.__index = ForeignThreadMT
function M.ForeignThread(coro) return setmetatable({ coro }, ForeignThreadMT) end
@@ -80,9 +80,9 @@
-- Current thread is used to debug the thread that caused the hook
-- intended to be used *ONLY* in debug loop (executed in a new thread)
local CurrentThreadMT = {
- getinfo = function(self, level, what) return getinfo(self[1], level + 2, what) end,
- getlocal = function(self, level, idx) return getlocal(self[1], level + 2, idx) end,
- setlocal = function(self, level, idx, val) return setlocal(self[1], level + 2, idx, val) end,
+ getinfo = function(self, level, what) return getinfo(self[1], level + 2, what) end,
+ getlocal = function(self, level, idx) return getlocal(self[1], level + 2, idx) end,
+ setlocal = function(self, level, idx, val) return setlocal(self[1], level + 2, idx, val) end,
}
CurrentThreadMT.__index = CurrentThreadMT
function M.CurrentThread(coro) return setmetatable({ coro }, CurrentThreadMT) end
@@ -90,88 +90,88 @@
-- Some version dependant functions
if _VERSION == "Lua 5.1" then
- local loadstring, getfenv, setfenv, debug_getinfo, MainThread =
- loadstring, getfenv, setfenv, debug.getinfo, nil
+ local loadstring, getfenv, setfenv, debug_getinfo, MainThread =
+ loadstring, getfenv, setfenv, debug.getinfo, nil
- -- in 5.1 "t" flag does not exist and trigger an error so remove it from what
- CurrentThreadMT.getinfo = function(self, level, what) return getinfo(self[1], level + 2, what:gsub("t", "", 1)) end
- ForeignThreadMT.getinfo = function(self, level, what) return getinfo(self[1], level, what:gsub("t", "", 1)) end
+ -- in 5.1 "t" flag does not exist and trigger an error so remove it from what
+ CurrentThreadMT.getinfo = function(self, level, what) return getinfo(self[1], level + 2, what:gsub("t", "", 1)) end
+ ForeignThreadMT.getinfo = function(self, level, what) return getinfo(self[1], level, what:gsub("t", "", 1)) end
- -- when we're forced to start debug loop on top of program stack (when on main coroutine)
- -- this requires some hackery to get right stack level
+ -- when we're forced to start debug loop on top of program stack (when on main coroutine)
+ -- this requires some hackery to get right stack level
- -- Fallback method to inspect running thread (only for main thread in 5.1 or for conditional breakpoints)
- --- Gets a script stack level with additional debugger logic added
- -- @param l (number) stack level to get for debugged script (0 based)
- -- @return real Lua stack level suitable to be passed through deubg functions
- local function get_script_level(l)
- local hook = debug.gethook()
- for i=2, math.huge do
- if assert(debug.getinfo(i, "f")).func == hook then
- return i + l -- the script to level is just below, but because of the extra call to this function, the level is ok for callee
- end
- end
- end
-
- if rawget(_G, "jit") then
- MainThread = {
- [1] = "main", -- as the raw thread object is used as table keys, provide a replacement.
- -- LuaJIT completely eliminates tail calls from stack, so get_script_level retunrs wrong result in this case
- getinfo = function(self, level, what) return getinfo(get_script_level(level) - 1, what:gsub("t", "", 1)) end,
- getlocal = function(self, level, idx) return getlocal(get_script_level(level) - 1, idx) end,
- setlocal = function(self, level, idx, val) return setlocal(get_script_level(level) - 1, idx, val) end,
- }
- else
- MainThread = {
- [1] = "main",
- getinfo = function(self, level, what) return getinfo(get_script_level(level) , what:gsub("t", "", 1)) end,
- getlocal = function(self, level, idx) return getlocal(get_script_level(level), idx) end,
- setlocal = function(self, level, idx, val) return setlocal(get_script_level(level), idx, val) end,
- }
- end
-
-
-
- -- If the VM is vanilla Lua 5.1 or LuaJIT 2 without 5.2 compatibility, there is no way to get a reference to
- -- the main coroutine, so fall back to direct mode: the debugger loop is started on the top of main thread
- -- and the actual level is recomputed each time
- local oldCurrentThread = M.CurrentThread
- M.CurrentThread = function(coro) return coro and oldCurrentThread(coro) or MainThread end
-
- -- load a piece of code alog with its environment
- function M.loadin(code, env)
- local f,err = loadstring(code)
- if not f then
- return nil, err
- else
- return f and setfenv(f, env)
+ -- Fallback method to inspect running thread (only for main thread in 5.1 or for conditional breakpoints)
+ --- Gets a script stack level with additional debugger logic added
+ -- @param l (number) stack level to get for debugged script (0 based)
+ -- @return real Lua stack level suitable to be passed through debug functions
+ local function get_script_level(l)
+ local hook = debug.gethook()
+ for i=2, math.huge do
+ if assert(debug.getinfo(i, "f")).func == hook then
+ return i + l -- the script to level is just below, but because of the extra call to this function, the level is ok for callee
end
end
-
- -- table that maps [gs]et environment to index
- M.eval_env = setmetatable({ }, {
- __index = function(self, func) return getfenv(func) end,
- __newindex = function(self, func, env) return setfenv(func, env) end,
- })
-elseif _VERSION == "Lua 5.2" then
- local load, debug_getinfo = load, debug.getinfo
- function M.getinfo(coro, level, what)
- if coro then return debug_getinfo(coro, level, what)
- else return debug_getinfo(level + 1, what) end
+ end
+
+ if rawget(_G, "jit") then
+ MainThread = {
+ [1] = "main", -- as the raw thread object is used as table keys, provide a replacement.
+ -- LuaJIT completely eliminates tail calls from stack, so get_script_level returns wrong result in this case
+ getinfo = function(self, level, what) return getinfo(get_script_level(level) - 1, what:gsub("t", "", 1)) end,
+ getlocal = function(self, level, idx) return getlocal(get_script_level(level) - 1, idx) end,
+ setlocal = function(self, level, idx, val) return setlocal(get_script_level(level) - 1, idx, val) end,
+ }
+ else
+ MainThread = {
+ [1] = "main",
+ getinfo = function(self, level, what) return getinfo(get_script_level(level) , what:gsub("t", "", 1)) end,
+ getlocal = function(self, level, idx) return getlocal(get_script_level(level), idx) end,
+ setlocal = function(self, level, idx, val) return setlocal(get_script_level(level), idx, val) end,
+ }
+ end
+
+
+
+ -- If the VM is vanilla Lua 5.1 or LuaJIT 2 without 5.2 compatibility, there is no way to get a reference to
+ -- the main coroutine, so fall back to direct mode: the debugger loop is started on the top of main thread
+ -- and the actual level is recomputed each time
+ local oldCurrentThread = M.CurrentThread
+ M.CurrentThread = function(coro) return coro and oldCurrentThread(coro) or MainThread end
+
+ -- load a piece of code along with its environment
+ function M.loadin(code, env)
+ local f,err = loadstring(code)
+ if not f then
+ return nil, err
+ else
+ return f and setfenv(f, env)
end
-
- function M.loadin(code, env) return load(code, nil, nil, env) end
-
- -- no eval_env for 5.2 as functions does not have environments anymore
+ end
+
+ -- table that maps [gs]et environment to index
+ M.eval_env = setmetatable({ }, {
+ __index = function(self, func) return getfenv(func) end,
+ __newindex = function(self, func, env) return setfenv(func, env) end,
+ })
+elseif _VERSION == "Lua 5.2" then
+ local load, debug_getinfo = load, debug.getinfo
+ function M.getinfo(coro, level, what)
+ if coro then return debug_getinfo(coro, level, what)
+ else return debug_getinfo(level + 1, what) end
+ end
+
+ function M.loadin(code, env) return load(code, nil, nil, env) end
+
+ -- no eval_env for 5.2 as functions does not have environments anymore
end
-- ----------------------------------------------------------------------------
-- Bare minimal log system.
-- ----------------------------------------------------------------------------
function M.log(level, msg, ...)
- if (LEVELS[level] or -1) > LOG_LEVEL then return end
- if select("#", ...) > 0 then msg = msg:format(...) end
- io.base.stderr:write(string.format("DEBUGGER\t%s\t%s\n", level, msg))
+ if (LEVELS[level] or -1) > LOG_LEVEL then return end
+ if select("#", ...) > 0 then msg = msg:format(...) end
+ io.base.stderr:write(string.format("DEBUGGER\t%s\t%s\n", level, msg))
end
return M
diff --git a/libraries/luadocumentor/docgenerator.lua b/libraries/luadocumentor/docgenerator.lua
index 23469e5..f99ebcf 100644
--- a/libraries/luadocumentor/docgenerator.lua
+++ b/libraries/luadocumentor/docgenerator.lua
@@ -15,7 +15,7 @@
--
local templateengine = require 'templateengine'
for name, def in pairs( require 'template.utils' ) do
- templateengine.env [ name ] = def
+ templateengine.env [ name ] = def
end
-- Load documentation extractor and set handled languages
@@ -25,63 +25,63 @@
M.defaultsitemainpagename = 'index'
function M.generatedocforfiles(filenames, cssname,noheuristic)
- if not filenames then return nil, 'No files provided.' end
- --
- -- Generate API model elements for all files
- --
- local generatedfiles = {}
- local wrongfiles = {}
- for _, filename in pairs( filenames ) do
- -- Load file content
- local file, error = io.open(filename, 'r')
- if not file then return nil, 'Unable to read "'..filename..'"\n'..err end
- local code = file:read('*all')
- file:close()
- -- Get module for current file
- local apimodule, err = lddextractor.generateapimodule(filename, code,noheuristic)
-
- -- Handle modules with module name
- if apimodule and apimodule.name then
- generatedfiles[ apimodule.name ] = apimodule
- elseif not apimodule then
- -- Track faulty files
- table.insert(wrongfiles, 'Unable to extract comments from "'..filename..'".\n'..err)
- elseif not apimodule.name then
- -- Do not generate documentation for unnamed modules
- table.insert(wrongfiles, 'Unable to create documentation for "'..filename..'", no module name provided.')
- end
- end
- --
- -- Defining index, which will summarize all modules
- --
- local index = {
- modules = generatedfiles,
- name = M.defaultsitemainpagename,
- tag='index'
- }
- generatedfiles[ M.defaultsitemainpagename ] = index
+ if not filenames then return nil, 'No files provided.' end
+ --
+ -- Generate API model elements for all files
+ --
+ local generatedfiles = {}
+ local wrongfiles = {}
+ for _, filename in pairs( filenames ) do
+ -- Load file content
+ local file, error = io.open(filename, 'r')
+ if not file then return nil, 'Unable to read "'..filename..'"\n'..err end
+ local code = file:read('*all')
+ file:close()
+ -- Get module for current file
+ local apimodule, err = lddextractor.generateapimodule(filename, code,noheuristic)
- --
- -- Define page cursor
- --
- local page = {
- currentmodule = nil,
- headers = { [[<link rel="stylesheet" href="]].. cssname ..[[" type="text/css"/>]] },
- modules = generatedfiles,
- tag = 'page'
- }
+ -- Handle modules with module name
+ if apimodule and apimodule.name then
+ generatedfiles[ apimodule.name ] = apimodule
+ elseif not apimodule then
+ -- Track faulty files
+ table.insert(wrongfiles, 'Unable to extract comments from "'..filename..'".\n'..err)
+ elseif not apimodule.name then
+ -- Do not generate documentation for unnamed modules
+ table.insert(wrongfiles, 'Unable to create documentation for "'..filename..'", no module name provided.')
+ end
+ end
+ --
+ -- Defining index, which will summarize all modules
+ --
+ local index = {
+ modules = generatedfiles,
+ name = M.defaultsitemainpagename,
+ tag='index'
+ }
+ generatedfiles[ M.defaultsitemainpagename ] = index
- --
- -- Iterate over modules, generating complete doc pages
- --
- for _, module in pairs( generatedfiles ) do
- -- Update current cursor page
- page.currentmodule = module
- -- Generate page
- local content, error = templateengine.applytemplate(page)
- if not content then return nil, error end
- module.body = content
- end
- return generatedfiles, wrongfiles
+ --
+ -- Define page cursor
+ --
+ local page = {
+ currentmodule = nil,
+ headers = { [[<link rel="stylesheet" href="]].. cssname ..[[" type="text/css"/>]] },
+ modules = generatedfiles,
+ tag = 'page'
+ }
+
+ --
+ -- Iterate over modules, generating complete doc pages
+ --
+ for _, module in pairs( generatedfiles ) do
+ -- Update current cursor page
+ page.currentmodule = module
+ -- Generate page
+ local content, error = templateengine.applytemplate(page)
+ if not content then return nil, error end
+ module.body = content
+ end
+ return generatedfiles, wrongfiles
end
return M
diff --git a/libraries/luadocumentor/extractors.lua b/libraries/luadocumentor/extractors.lua
index 7033c6a..27a0145 100644
--- a/libraries/luadocumentor/extractors.lua
+++ b/libraries/luadocumentor/extractors.lua
@@ -17,86 +17,86 @@
-- Enable to retrieve all Javadoc-like comments from C code
function M.c(code)
- if not code then return nil, 'No code provided' end
- local comments = {}
- -- Loop over comments stripping cosmetic '*'
- for comment in code:gmatch('%s*/%*%*+(.-)%*+/') do
- -- All Lua special comment are prefixed with an '-',
- -- so we also comment C comment to make them compliant
- table.insert(comments, '-'..comment)
- end
- return comments
+ if not code then return nil, 'No code provided' end
+ local comments = {}
+ -- Loop over comments stripping cosmetic '*'
+ for comment in code:gmatch('%s*/%*%*+(.-)%*+/') do
+ -- All Lua special comments are prefixed with a '-',
+ -- so we also prefix C comments to make them compliant
+ table.insert(comments, '-'..comment)
+ end
+ return comments
end
-- Enable to retrieve "---" comments from Lua code
function M.lua( code )
- if not code then return nil, 'No code provided' end
-
- -- manage shebang
+ if not code then return nil, 'No code provided' end
+
+ -- manage shebang
if code then code = code:gsub("^(#.-\n)", function (s) return string.rep(' ',string.len(s)) end) end
-
+
-- check for errors
local f, err = loadstring(code,'source_to_check')
- if not f then
+ if not f then
return nil, 'Syntax error.\n' .. err
end
-
- -- Get ast from file
- local status, ast = pcall(mlc.src_to_ast, mlc, code)
- --
- -- Detect parsing errors
- --
- if not status then
- return nil, 'There might be a syntax error.\n' .. ast
- end
- --
- -- Extract commented nodes from AST
- --
+ -- Get ast from file
+ local status, ast = pcall(mlc.src_to_ast, mlc, code)
+ --
+ -- Detect parsing errors
+ --
+ if not status then
+ return nil, 'There might be a syntax error.\n' .. ast
+ end
- -- Function enabling commented node selection
- local function acceptcommentednode(node)
- return node.lineinfo and ( node.lineinfo.last.comments or node.lineinfo.first.comments )
- end
+ --
+ -- Extract commented nodes from AST
+ --
- -- Fetch commented node from AST
- local commentednodes = Q(ast):filter( acceptcommentednode ):list()
+ -- Function enabling commented node selection
+ local function acceptcommentednode(node)
+ return node.lineinfo and ( node.lineinfo.last.comments or node.lineinfo.first.comments )
+ end
- -- Comment cache to avoid selecting same comment twice
- local commentcache = {}
- -- Will contain selected comments
- local comments = {}
+ -- Fetch commented node from AST
+ local commentednodes = Q(ast):filter( acceptcommentednode ):list()
- -- Loop over commented nodes
- for _, node in ipairs( commentednodes ) do
+ -- Comment cache to avoid selecting same comment twice
+ local commentcache = {}
+ -- Will contain selected comments
+ local comments = {}
- -- A node can is relateds to comment before and after itself,
- -- the following gathers them.
- local commentlists = {}
- if node.lineinfo and node.lineinfo.first.comments then
- table.insert(commentlists, node.lineinfo.first.comments)
- end
- if node.lineinfo and node.lineinfo.last.comments then
- table.insert(commentlists, node.lineinfo.last.comments)
- end
- -- Now that we have comments before and fater the node,
- -- collect them in a single table
- for _, list in ipairs( commentlists ) do
- for _, commenttable in ipairs(list) do
- -- Only select special comments
- local firstcomment = #commenttable > 0 and #commenttable[1] > 0 and commenttable[1]
- if firstcomment:sub(1, 1) == '-' then
- for _, comment in ipairs( commenttable ) do
- -- Only comments which were not already collected
- if not commentcache[comment] then
- commentcache[comment] = true
- table.insert(comments, comment)
- end
- end
- end
- end
- end
- end
- return comments
+ -- Loop over commented nodes
+ for _, node in ipairs( commentednodes ) do
+
+ -- A node is related to comments before and after itself,
+ -- the following gathers them.
+ local commentlists = {}
+ if node.lineinfo and node.lineinfo.first.comments then
+ table.insert(commentlists, node.lineinfo.first.comments)
+ end
+ if node.lineinfo and node.lineinfo.last.comments then
+ table.insert(commentlists, node.lineinfo.last.comments)
+ end
+ -- Now that we have comments before and after the node,
+ -- collect them in a single table
+ for _, list in ipairs( commentlists ) do
+ for _, commenttable in ipairs(list) do
+ -- Only select special comments
+ local firstcomment = #commenttable > 0 and #commenttable[1] > 0 and commenttable[1]
+ if firstcomment:sub(1, 1) == '-' then
+ for _, comment in ipairs( commenttable ) do
+ -- Only comments which were not already collected
+ if not commentcache[comment] then
+ commentcache[comment] = true
+ table.insert(comments, comment)
+ end
+ end
+ end
+ end
+ end
+ end
+ return comments
end
return M
diff --git a/libraries/luadocumentor/fs/lfs.lua b/libraries/luadocumentor/fs/lfs.lua
index 835f588..a4cc9d4 100644
--- a/libraries/luadocumentor/fs/lfs.lua
+++ b/libraries/luadocumentor/fs/lfs.lua
@@ -12,119 +12,119 @@
local lfs = require 'lfs'
local M = {}
local function iswindows()
- local p = io.popen("echo %os%")
- if not p then
- return false
- end
- local result =p:read("*l")
- p:close()
- return result == "Windows_NT"
+ local p = io.popen("echo %os%")
+ if not p then
+ return false
+ end
+ local result =p:read("*l")
+ p:close()
+ return result == "Windows_NT"
end
M.separator = iswindows() and [[\]] or [[/]]
---
-- Will recursively browse given directories and list files encountered
-- @param tab Table, list where files will be added
-- @param dirorfiles list of path to browse in order to build list.
--- Files from this list will be added to <code>tab</code> list.
+-- Files from this list will be added to <code>tab</code> list.
-- @return <code>tab</code> list, table containing all files from directories
--- and files contained in <code>dirorfile</code>
+-- and files contained in <code>dirorfile</code>
local function appendfiles(tab, dirorfile)
- -- Nothing to process
- if #dirorfile < 1 then return tab end
+ -- Nothing to process
+ if #dirorfile < 1 then return tab end
- -- Append all files to list
- local dirs = {}
- for _, path in ipairs( dirorfile ) do
- -- Determine element nature
- local elementnature = lfs.attributes (path, "mode")
+ -- Append all files to list
+ local dirs = {}
+ for _, path in ipairs( dirorfile ) do
+ -- Determine element nature
+ local elementnature = lfs.attributes (path, "mode")
- -- Handle files
- if elementnature == 'file' then
- table.insert(tab, path)
- else if elementnature == 'directory' then
+ -- Handle files
+ if elementnature == 'file' then
+ table.insert(tab, path)
+ else if elementnature == 'directory' then
- -- Check if folder is accessible
- local status, error = pcall(lfs.dir, path)
- if not status then return nil, error end
+ -- Check if folder is accessible
+ local status, error = pcall(lfs.dir, path)
+ if not status then return nil, error end
- --
- -- Handle folders
- --
- for diskelement in lfs.dir(path) do
+ --
+ -- Handle folders
+ --
+ for diskelement in lfs.dir(path) do
- -- Format current file name
- local currentfilename
- if path:sub(#path) == M.separator then
- currentfilename = path .. diskelement
- else
- currentfilename = path .. M.separator .. diskelement
- end
+ -- Format current file name
+ local currentfilename
+ if path:sub(#path) == M.separator then
+ currentfilename = path .. diskelement
+ else
+ currentfilename = path .. M.separator .. diskelement
+ end
- -- Handle folder elements
- local nature, err = lfs.attributes (currentfilename, "mode")
- -- Append file to current list
- if nature == 'file' then
- table.insert(tab, currentfilename)
- elseif nature == 'directory' then
- -- Avoid current and parent directory in order to avoid
- -- endless recursion
- if diskelement ~= '.' and diskelement ~= '..' then
- -- Handle subfolders
- table.insert(dirs, currentfilename)
- end
- end
- end
- end
- end
- end
- -- If we only encountered files, going deeper is useless
- if #dirs == 0 then return tab end
- -- Append files from encountered directories
- return appendfiles(tab, dirs)
+ -- Handle folder elements
+ local nature, err = lfs.attributes (currentfilename, "mode")
+ -- Append file to current list
+ if nature == 'file' then
+ table.insert(tab, currentfilename)
+ elseif nature == 'directory' then
+ -- Avoid current and parent directory in order to avoid
+ -- endless recursion
+ if diskelement ~= '.' and diskelement ~= '..' then
+ -- Handle subfolders
+ table.insert(dirs, currentfilename)
+ end
+ end
+ end
+ end
+ end
+ end
+ -- If we only encountered files, going deeper is useless
+ if #dirs == 0 then return tab end
+ -- Append files from encountered directories
+ return appendfiles(tab, dirs)
end
---
-- Provide a list of files from a directory
-- @param list Table of directories to browse
-- @return table of string, path to files contained in given directories
function M.filelist(list)
- if not list then return nil, 'No directory list provided' end
- return appendfiles({}, list)
+ if not list then return nil, 'No directory list provided' end
+ return appendfiles({}, list)
end
function M.checkdirectory( dirlist )
- if not dirlist then return false end
- local missingdirs = {}
- for _, filename in ipairs( dirlist ) do
- if not lfs.attributes(filename, 'mode') then
- table.insert(missingdirs, filename)
- end
- end
- if #missingdirs > 0 then
- return false, missingdirs
- end
- return true
+ if not dirlist then return false end
+ local missingdirs = {}
+ for _, filename in ipairs( dirlist ) do
+ if not lfs.attributes(filename, 'mode') then
+ table.insert(missingdirs, filename)
+ end
+ end
+ if #missingdirs > 0 then
+ return false, missingdirs
+ end
+ return true
end
function M.fill(filename, content)
- --
- -- Ensure parent directory exists
- --
- local parent = filename:gmatch([[(.*)]] .. M.separator ..[[(.+)]])()
- local parentnature = lfs.attributes(parent, 'mode')
- -- Create parent directory while absent
- if not parentnature then
- lfs.mkdir( parent )
- elseif parentnature ~= 'directory' then
- -- Notify that disk element already exists
- return nil, parent..' is a '..parentnature..'.'
- end
+ --
+ -- Ensure parent directory exists
+ --
+ local parent = filename:gmatch([[(.*)]] .. M.separator ..[[(.+)]])()
+ local parentnature = lfs.attributes(parent, 'mode')
+ -- Create parent directory while absent
+ if not parentnature then
+ lfs.mkdir( parent )
+ elseif parentnature ~= 'directory' then
+ -- Notify that disk element already exists
+ return nil, parent..' is a '..parentnature..'.'
+ end
- -- Create actual file
- local file, error = io.open(filename, 'w')
- if not file then
- return nil, error
- end
- file:write( content )
- file:close()
- return true
+ -- Create actual file
+ local file, error = io.open(filename, 'w')
+ if not file then
+ return nil, error
+ end
+ file:write( content )
+ file:close()
+ return true
end
return M
diff --git a/libraries/luadocumentor/lddextractor.lua b/libraries/luadocumentor/lddextractor.lua
index 8974a86..93579f0 100644
--- a/libraries/luadocumentor/lddextractor.lua
+++ b/libraries/luadocumentor/lddextractor.lua
@@ -25,52 +25,52 @@
-- Support C comment extracting
for _,c in ipairs({'c', 'cpp', 'c++'}) do
- M.supportedlanguages[c] = extractors.c
+ M.supportedlanguages[c] = extractors.c
end
-- Extract comment from code,
-- type of code is deduced from filename extension
function M.extract(filename, code)
- -- Check parameters
- if not code then return nil, 'No code provided' end
- if type(filename) ~= "string" then
- return nil, 'No string for file name provided'
- end
+ -- Check parameters
+ if not code then return nil, 'No code provided' end
+ if type(filename) ~= "string" then
+ return nil, 'No string for file name provided'
+ end
- -- Extract file extension
- local fileextension = filename:gmatch('.*%.(.*)')()
- if not fileextension then
- return nil, 'File '..filename..' has no extension, could not determine how to extract documentation.'
- end
+ -- Extract file extension
+ local fileextension = filename:gmatch('.*%.(.*)')()
+ if not fileextension then
+ return nil, 'File '..filename..' has no extension, could not determine how to extract documentation.'
+ end
- -- Check if it is possible to extract documentation from these files
- local extractor = M.supportedlanguages[ fileextension ]
- if not extractor then
- return nil, 'Unable to extract documentation from '.. fileextension .. ' file.'
- end
- return extractor( code )
+ -- Check if it is possible to extract documentation from these files
+ local extractor = M.supportedlanguages[ fileextension ]
+ if not extractor then
+ return nil, 'Unable to extract documentation from '.. fileextension .. ' file.'
+ end
+ return extractor( code )
end
-- Generate a file gathering only comments from given code
function M.generatecommentfile(filename, code)
- local comments, error = M.extract(filename, code)
- if not comments then
- return nil, 'Unable to generate comment file.\n'..error
- end
- local filecontent = {}
- for _, comment in ipairs( comments ) do
- table.insert(filecontent, "--[[")
- table.insert(filecontent, comment)
- table.insert(filecontent, "\n]]\n\n")
- end
- return table.concat(filecontent)..'return nil\n'
+ local comments, error = M.extract(filename, code)
+ if not comments then
+ return nil, 'Unable to generate comment file.\n'..error
+ end
+ local filecontent = {}
+ for _, comment in ipairs( comments ) do
+ table.insert(filecontent, "--[[")
+ table.insert(filecontent, comment)
+ table.insert(filecontent, "\n]]\n\n")
+ end
+ return table.concat(filecontent)..'return nil\n'
end
-- Create API Model module from a 'comment only' lua file
function M.generateapimodule(filename, code,noheuristic)
- if not filename then return nil, 'No file name given.' end
- if not code then return nil, 'No code provided.' end
- if type(filename) ~= "string" then return nil, 'No string for file name provided' end
-
- -- for non lua file get comment file
+ if not filename then return nil, 'No file name given.' end
+ if not code then return nil, 'No code provided.' end
+ if type(filename) ~= "string" then return nil, 'No string for file name provided' end
+
+ -- for non lua file get comment file
if filename:gmatch('.*%.(.*)')() ~= 'lua' then
local err
code, err = M.generatecommentfile(filename, code)
@@ -80,25 +80,25 @@
else
-- manage shebang
if code then code = code:gsub("^(#.-\n)", function (s) return string.rep(' ',string.len(s)) end) end
-
+
-- check for errors
local f, err = loadstring(code,'source_to_check')
if not f then
return nil, 'File'..filename..'contains syntax error.\n' .. err
end
end
-
- local status, ast = pcall(mlc.src_to_ast, mlc, code)
- if not status then
- return nil, 'Unable to compute ast for "'..filename..'".\n'..ast
- end
-
+
+ local status, ast = pcall(mlc.src_to_ast, mlc, code)
+ if not status then
+ return nil, 'Unable to compute ast for "'..filename..'".\n'..ast
+ end
+
-- Extract module name as the filename without extension
local modulename
local matcher = string.gmatch(filename,'.*/(.*)%..*$')
if matcher then modulename = matcher() end
-
- -- Create api model
+
+ -- Create api model
local apimodelbuilder = require 'models.apimodelbuilder'
local _file, comment2apiobj = apimodelbuilder.createmoduleapi(ast, modulename)
@@ -107,6 +107,6 @@
local internalmodelbuilder = require "models.internalmodelbuilder"
local _internalcontent = internalmodelbuilder.createinternalcontent(ast,_file,comment2apiobj, modulename)
end
- return _file
+ return _file
end
return M
diff --git a/libraries/luadocumentor/luadocumentor.lua b/libraries/luadocumentor/luadocumentor.lua
index e12334a..865ff24 100755
--- a/libraries/luadocumentor/luadocumentor.lua
+++ b/libraries/luadocumentor/luadocumentor.lua
@@ -13,8 +13,8 @@
-- Check interpreter version
if _VERSION ~= "Lua 5.1" then
- print("Luadocumentor is only compatible with Lua 5.1")
- return
+ print("Luadocumentor is only compatible with Lua 5.1")
+ return
end
-- try to define the right lua path.
@@ -22,17 +22,17 @@
local luadocumentordirpath
local debugpath = debug.getinfo(1).source;
if debugpath then
- -- extract the directory path of luadocumentor.lua
- luadocumentordirpath = string.match(debugpath,"^@(.*)luadocumentor.lua$")
- if luadocumentordirpath then
- if luadocumentordirpath == "" then luadocumentordirpath = "./" end
- -- change lua path and mpath to not load system version of metalua
- package.path = luadocumentordirpath.."?.lua;"..luadocumentordirpath.."?.luac;"
- require "metalua.loader"
- package.mpath = luadocumentordirpath.."?.mlua;"
- -- do not change cpath to have access to lfs.
- -- (it must be already installed)
- end
+ -- extract the directory path of luadocumentor.lua
+ luadocumentordirpath = string.match(debugpath,"^@(.*)luadocumentor.lua$")
+ if luadocumentordirpath then
+ if luadocumentordirpath == "" then luadocumentordirpath = "./" end
+ -- change lua path and mpath to not load system version of metalua
+ package.path = luadocumentordirpath.."?.lua;"..luadocumentordirpath.."?.luac;"
+ require "metalua.loader"
+ package.mpath = luadocumentordirpath.."?.mlua;"
+ -- do not change cpath to have access to lfs.
+ -- (it must be already installed)
+ end
end
--
@@ -57,12 +57,12 @@
local args = lapp( help )
if not args or #args < 1 then
- print('No directory provided')
- return
+ print('No directory provided')
+ return
elseif args.help then
- -- Just print help
- print( help )
- return
+ -- Just print help
+ print( help )
+ return
end
--
@@ -80,117 +80,117 @@
-- Some of given directories are absent
if missing then
- -- List missing directories
- print 'Unable to open'
- for _, file in ipairs( missing ) do
- print('\t'.. file)
- end
- return
+ -- List missing directories
+ print 'Unable to open'
+ for _, file in ipairs( missing ) do
+ print('\t'.. file)
+ end
+ return
end
-- Get files from given directories
local filestoparse, error = fs.filelist( args )
if not filestoparse then
- print ( error )
- return
+ print ( error )
+ return
end
--
-- Generate documentation only files
--
if args.format == 'api' then
- for _, filename in ipairs( filestoparse ) do
+ for _, filename in ipairs( filestoparse ) do
- -- Loading file content
- print('Dealing with "'..filename..'".')
- local file, error = io.open(filename, 'r')
- if not file then
- print ('Unable to open "'..filename.."'.\n"..error)
- else
- local code = file:read('*all')
- file:close()
+ -- Loading file content
+ print('Dealing with "'..filename..'".')
+ local file, error = io.open(filename, 'r')
+ if not file then
+ print ('Unable to open "'..filename.."'.\n"..error)
+ else
+ local code = file:read('*all')
+ file:close()
- --
- -- Creating comment file
- --
- local commentfile, error = lddextractor.generatecommentfile(filename, code)
+ --
+ -- Creating comment file
+ --
+ local commentfile, error = lddextractor.generatecommentfile(filename, code)
- -- Getting module name
- -- Optimize me
- local module, moduleerror = lddextractor.generateapimodule(filename, code)
- if not commentfile then
- print('Unable to create documentation file for "'..filename..'"\n'..error)
- elseif not module or not module.name then
- local error = moduleerror and '\n'..moduleerror or ''
- print('Unable to compute module name for "'..filename..'".'..error)
- else
- --
- -- Flush documentation file on disk
- --
- local path = args.dir..fs.separator..module.name..'.lua'
- local status, err = fs.fill(path, commentfile)
- if not status then
- print(err)
- end
- end
- end
- end
- print('Done')
- return
+ -- Getting module name
+ -- Optimize me
+ local module, moduleerror = lddextractor.generateapimodule(filename, code)
+ if not commentfile then
+ print('Unable to create documentation file for "'..filename..'"\n'..error)
+ elseif not module or not module.name then
+ local error = moduleerror and '\n'..moduleerror or ''
+ print('Unable to compute module name for "'..filename..'".'..error)
+ else
+ --
+ -- Flush documentation file on disk
+ --
+ local path = args.dir..fs.separator..module.name..'.lua'
+ local status, err = fs.fill(path, commentfile)
+ if not status then
+ print(err)
+ end
+ end
+ end
+ end
+ print('Done')
+ return
end
-- Deal only supported output types
if args.format ~= 'doc' then
- print ('"'..args.format..'" format is not handled.')
- return
+ print ('"'..args.format..'" format is not handled.')
+ return
end
-- Generate html form files
local parsedfiles, unparsed = docgenerator.generatedocforfiles(filestoparse, cssfilename,args.noheuristic)
-- Show warnings on unparsed files
if #unparsed > 0 then
- for _, faultyfile in ipairs( unparsed ) do
- print( faultyfile )
- end
+ for _, faultyfile in ipairs( unparsed ) do
+ print( faultyfile )
+ end
end
-- This loop is just for counting parsed files
-- TODO: Find a more elegant way to do it
local parsedfilescount = 0
for _, p in pairs(parsedfiles) do
- parsedfilescount = parsedfilescount + 1
+ parsedfilescount = parsedfilescount + 1
end
print (parsedfilescount .. ' file(s) parsed.')
-- Create html files
local generated = 0
for _, apifile in pairs ( parsedfiles ) do
- local status, err = fs.fill(args.dir..fs.separator..apifile.name..'.html', apifile.body)
- if status then
- generated = generated + 1
- else
- print( 'Unable to create '..apifile.name..'.html on disk.')
- end
+ local status, err = fs.fill(args.dir..fs.separator..apifile.name..'.html', apifile.body)
+ if status then
+ generated = generated + 1
+ else
+ print( 'Unable to create '..apifile.name..'.html on disk.')
+ end
end
print (generated .. ' file(s) generated.')
-- Copying css
local csscontent
if args.style == '!' then
- csscontent = require 'defaultcss'
+ csscontent = require 'defaultcss'
else
- local css, error = io.open(args.style, 'r')
- if not css then
- print('Unable to open "'..args.style .. '".\n'..error)
- return
- end
- csscontent = css:read("*all")
- css:close()
+ local css, error = io.open(args.style, 'r')
+ if not css then
+ print('Unable to open "'..args.style .. '".\n'..error)
+ return
+ end
+ csscontent = css:read("*all")
+ css:close()
end
local status, error = fs.fill(args.dir..fs.separator..cssfilename, csscontent)
if not status then
- print(error)
- return
+ print(error)
+ return
end
print('Adding css')
print('Done')
diff --git a/libraries/luadocumentor/template/index.lua b/libraries/luadocumentor/template/index.lua
index 8b3142f..28c0605 100644
--- a/libraries/luadocumentor/template/index.lua
+++ b/libraries/luadocumentor/template/index.lua
@@ -14,15 +14,15 @@
#if _index.modules then
<div id="content">
<h2>Module$( #_index.modules > 1 and 's' )</h2>
- <table class="module_list">
-# for _, module in sortedpairs( _index.modules ) do
-# if module.tag ~= 'index' then
- <tr>
- <td class="name" nowrap="nowrap">$( fulllinkto(module) )</td>
- <td class="summary">$( module.description and format(module.shortdescription) )</td>
- </tr>
-# end
-# end
- </table>
+ <table class="module_list">
+# for _, module in sortedpairs( _index.modules ) do
+# if module.tag ~= 'index' then
+ <tr>
+ <td class="name" nowrap="nowrap">$( fulllinkto(module) )</td>
+ <td class="summary">$( module.description and format(module.shortdescription) )</td>
+ </tr>
+# end
+# end
+ </table>
</div>
#end ]]
diff --git a/libraries/luadocumentor/template/page.lua b/libraries/luadocumentor/template/page.lua
index 4fce730..d233efc 100644
--- a/libraries/luadocumentor/template/page.lua
+++ b/libraries/luadocumentor/template/page.lua
@@ -13,55 +13,55 @@
[[<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
#if _page.headers and #_page.headers > 0 then
- <head>
-# for _, header in ipairs(_page.headers) do
- $(header)
-# end
- </head>
+ <head>
+# for _, header in ipairs(_page.headers) do
+ $(header)
+# end
+ </head>
#end
<body>
<div id="container">
<div id="product">
- <div id="product_logo"></div>
- <div id="product_name"><big><b></b></big></div>
- <div id="product_description"></div>
+ <div id="product_logo"></div>
+ <div id="product_name"><big><b></b></big></div>
+ <div id="product_description"></div>
</div>
<div id="main">
# --
# -- Generating lateral menu
# --
- <div id="navigation">
-# local index = 'index'
-# if _page.modules then
- <h2>Modules</h2>
-# -- Check if an index is defined
-# if _page.modules [ index ] then
-# local module = _page.modules [ index ]
- <ul><li>
-# if module ~= _page.currentmodule then
- <a href="$( linkto(module) )">$(module.name)</a>
-# else
- $(module.name)
-# end
- </li></ul>
-# end
+ <div id="navigation">
+# local index = 'index'
+# if _page.modules then
+ <h2>Modules</h2>
+# -- Check if an index is defined
+# if _page.modules [ index ] then
+# local module = _page.modules [ index ]
+ <ul><li>
+# if module ~= _page.currentmodule then
+ <a href="$( linkto(module) )">$(module.name)</a>
+# else
+ $(module.name)
+# end
+ </li></ul>
+# end
#
- <ul>
-# -- Generating links for all modules
-# for _, module in sortedpairs( _page.modules ) do
-# -- Except for current one
-# if module.name ~= index then
-# if module ~= _page.currentmodule then
- <li><a href="$( linkto(module) )">$(module.name)</a></li>
-# else
- <li>$(module.name)</li>
-# end
-# end
-# end
- </ul>
-# end
- </div>
- $( applytemplate(_page.currentmodule) )
+ <ul>
+# -- Generating links for all modules
+# for _, module in sortedpairs( _page.modules ) do
+# -- Except for current one
+# if module.name ~= index then
+# if module ~= _page.currentmodule then
+ <li><a href="$( linkto(module) )">$(module.name)</a></li>
+# else
+ <li>$(module.name)</li>
+# end
+# end
+# end
+ </ul>
+# end
+ </div>
+ $( applytemplate(_page.currentmodule) )
</div>
</body>
</html>
diff --git a/libraries/modelsbuilder/models/apimodel.lua b/libraries/modelsbuilder/models/apimodel.lua
index ff89a80..dda4f9d 100644
--- a/libraries/modelsbuilder/models/apimodel.lua
+++ b/libraries/modelsbuilder/models/apimodel.lua
@@ -1,255 +1,255 @@
---------------------------------------------------------------------------------
--- Copyright (c) 2011-2012 Sierra Wireless.
--- All rights reserved. This program and the accompanying materials
--- are made available under the terms of the Eclipse Public License v1.0
--- which accompanies this distribution, and is available at
--- http://www.eclipse.org/legal/epl-v10.html
---
--- Contributors:
--- Simon BERNARD <sbernard@sierrawireless.com>
--- - initial API and implementation and initial documentation
---------------------------------------------------------------------------------
-local M = {}
-
---------------------------------------------------------------------------------
--- API MODEL
---------------------------------------------------------------------------------
-
-function M._file()
- local file = {
- -- FIELDS
- tag = "file",
- name = nil, -- string
- shortdescription = "", -- string
- description = "", -- string
- types = {}, -- map from typename to type
- globalvars = {}, -- map from varname to item
- returns = {}, -- list of return
-
- -- FUNCTIONS
- addtype = function (self,type)
- self.types[type.name] = type
- type.parent = self
- end,
-
- mergetype = function (self,newtype,erase,erasesourcerangefield)
- local currenttype = self.types[newtype.name]
- if currenttype then
- -- merge recordtypedef
- if currenttype.tag =="recordtypedef" and newtype.tag == "recordtypedef" then
- -- merge fields
- for fieldname ,field in pairs( newtype.fields) do
- local currentfield = currenttype.fields[fieldname]
- if erase or not currentfield then
- currenttype:addfield(field)
- elseif erasesourcerangefield then
- if field.sourcerange.min and field.sourcerange.max then
- currentfield.sourcerange.min = field.sourcerange.min
- currentfield.sourcerange.max = field.sourcerange.max
- end
- end
- end
-
- -- merge descriptions and source ranges
- if erase then
- if newtype.description or newtype.description == "" then currenttype.description = newtype.description end
- if newtype.shortdescription or newtype.shortdescription == "" then currenttype.shortdescription = newtype.shortdescription end
- if newtype.sourcerange.min and newtype.sourcerange.max then
- currenttype.sourcerange.min = newtype.sourcerange.min
- currenttype.sourcerange.max = newtype.sourcerange.max
- end
- end
- -- merge functiontypedef
- elseif currenttype.tag == "functiontypedef" and newtype.tag == "functiontypedef" then
- -- merge params
- for i, param1 in ipairs(newtype.params) do
- local missing = true
- for j, param2 in ipairs(currenttype.params) do
- if param1.name == param2.name then
- missing = false
- break
- end
- end
- if missing then
- table.insert(currenttype.params,param1)
- end
- end
-
- -- merge descriptions and source ranges
- if erase then
- if newtype.description or newtype.description == "" then currenttype.description = newtype.description end
- if newtype.shortdescription or newtype.shortdescription == "" then currenttype.shortdescription = newtype.shortdescription end
- if newtype.sourcerange.min and newtype.sourcerange.max then
- currenttype.sourcerange.min = newtype.sourcerange.min
- currenttype.sourcerange.max = newtype.sourcerange.max
- end
- end
- end
- else
- self:addtype(newtype)
- end
- end,
-
- addglobalvar = function (self,item)
- self.globalvars[item.name] = item
- item.parent = self
- end,
-
- moduletyperef = function (self)
- if self and self.returns[1] and self.returns[1].types[1] then
- local typeref = self.returns[1].types[1]
- return typeref
- end
- end
- }
- return file
-end
-
-function M._recordtypedef(name)
- local recordtype = {
- -- FIELDS
- tag = "recordtypedef",
- name = name, -- string (mandatory)
- shortdescription = "", -- string
- description = "", -- string
- fields = {}, -- map from fieldname to field
- sourcerange = {min=0,max=0},
- supertype = nil, -- supertype of the type def (inheritance), should be a recordtypedef ref
- defaultkeytyperef = nil, -- the default typeref of key
- defaultvaluetyperef = nil, -- the default typeref of value
- structurekind = nil, -- kind of structure of the type: could be nil, "map" or "list"
- structuredescription = nil, -- description of the structure
- call = nil, -- typeref to the function use as __call on the type.
- -- FUNCTIONS
- addfield = function (self,field)
- self.fields[field.name] = field
- field.parent = self
- end,
-
- getcalldef = function( self)
- if self.call and self.call.tag == 'internaltyperef' then
- if self.parent and self.parent.tag == 'file' then
- local file = self.parent
- return file.types[self.call.typename]
- end
- end
- end
- }
- return recordtype
-end
-
-function M._functiontypedef(name)
- return {
- tag = "functiontypedef",
- name = name, -- string (mandatory)
- shortdescription = "", -- string
- description = "", -- string
- params = {}, -- list of parameter
- returns = {} -- list of return
- }
-end
-
-function M._parameter(name)
- return {
- tag = "parameter",
- name = name, -- string (mandatory)
- description = "", -- string
- type = nil -- typeref (external or internal or primitive typeref)
- }
-end
-
-function M._item(name)
- return {
- -- FIELDS
- tag = "item",
- name = name, -- string (mandatory)
- shortdescription = "", -- string
- description = "", -- string
- type = nil, -- typeref (external or internal or primitive typeref)
- occurrences = {}, -- list of identifier (see internalmodel)
- sourcerange = {min=0, max=0},
-
- -- This is A TRICK
- -- This value is ALWAYS nil, except for internal purposes (short references).
- external = nil,
-
- -- FUNCTIONS
- addoccurence = function (self,occ)
- table.insert(self.occurrences,occ)
- occ.definition = self
- end,
-
- resolvetype = function (self,file)
- if self and self.type then
- if self.type.tag =="internaltyperef" then
- -- if file is not given try to retrieve it.
- if not file then
- if self.parent and self.parent.tag == 'recordtypedef' then
- file = self.parent.parent
- elseif self.parent.tag == 'file' then
- file = self.parent
- end
- end
- if file then return file.types[self.type.typename] end
- elseif self.type.tag =="inlinetyperef" then
- return self.type.def
- end
- end
- end
- }
-end
-
-function M._externaltypref(modulename, typename)
- return {
- tag = "externaltyperef",
- modulename = modulename, -- string
- typename = typename -- string
- }
-end
-
-function M._internaltyperef(typename)
- return {
- tag = "internaltyperef",
- typename = typename -- string
- }
-end
-
-function M._primitivetyperef(typename)
- return {
- tag = "primitivetyperef",
- typename = typename -- string
- }
-end
-
-function M._moduletyperef(modulename,returnposition)
- return {
- tag = "moduletyperef",
- modulename = modulename, -- string
- returnposition = returnposition -- number
- }
-end
-
-function M._exprtyperef(expression,returnposition)
- return {
- tag = "exprtyperef",
- expression = expression, -- expression (see internal model)
- returnposition = returnposition -- number
- }
-end
-
-function M._inlinetyperef(definition)
- return {
- tag = "inlinetyperef",
- def = definition, -- expression (see internal model)
-
- }
-end
-
-function M._return(description)
- return {
- tag = "return",
- description = description or "", -- string
- types = {} -- list of typref (external or internal or primitive typeref)
- }
-end
-return M
+--------------------------------------------------------------------------------
+-- Copyright (c) 2011-2012 Sierra Wireless.
+-- All rights reserved. This program and the accompanying materials
+-- are made available under the terms of the Eclipse Public License v1.0
+-- which accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- Contributors:
+-- Simon BERNARD <sbernard@sierrawireless.com>
+-- - initial API and implementation and initial documentation
+--------------------------------------------------------------------------------
+local M = {}
+
+--------------------------------------------------------------------------------
+-- API MODEL
+--------------------------------------------------------------------------------
+
+function M._file()
+ local file = {
+ -- FIELDS
+ tag = "file",
+ name = nil, -- string
+ shortdescription = "", -- string
+ description = "", -- string
+ types = {}, -- map from typename to type
+ globalvars = {}, -- map from varname to item
+ returns = {}, -- list of return
+
+ -- FUNCTIONS
+ addtype = function (self,type)
+ self.types[type.name] = type
+ type.parent = self
+ end,
+
+ mergetype = function (self,newtype,erase,erasesourcerangefield)
+ local currenttype = self.types[newtype.name]
+ if currenttype then
+ -- merge recordtypedef
+ if currenttype.tag =="recordtypedef" and newtype.tag == "recordtypedef" then
+ -- merge fields
+ for fieldname ,field in pairs( newtype.fields) do
+ local currentfield = currenttype.fields[fieldname]
+ if erase or not currentfield then
+ currenttype:addfield(field)
+ elseif erasesourcerangefield then
+ if field.sourcerange.min and field.sourcerange.max then
+ currentfield.sourcerange.min = field.sourcerange.min
+ currentfield.sourcerange.max = field.sourcerange.max
+ end
+ end
+ end
+
+ -- merge descriptions and source ranges
+ if erase then
+ if newtype.description or newtype.description == "" then currenttype.description = newtype.description end
+ if newtype.shortdescription or newtype.shortdescription == "" then currenttype.shortdescription = newtype.shortdescription end
+ if newtype.sourcerange.min and newtype.sourcerange.max then
+ currenttype.sourcerange.min = newtype.sourcerange.min
+ currenttype.sourcerange.max = newtype.sourcerange.max
+ end
+ end
+ -- merge functiontypedef
+ elseif currenttype.tag == "functiontypedef" and newtype.tag == "functiontypedef" then
+ -- merge params
+ for i, param1 in ipairs(newtype.params) do
+ local missing = true
+ for j, param2 in ipairs(currenttype.params) do
+ if param1.name == param2.name then
+ missing = false
+ break
+ end
+ end
+ if missing then
+ table.insert(currenttype.params,param1)
+ end
+ end
+
+ -- merge descriptions and source ranges
+ if erase then
+ if newtype.description or newtype.description == "" then currenttype.description = newtype.description end
+ if newtype.shortdescription or newtype.shortdescription == "" then currenttype.shortdescription = newtype.shortdescription end
+ if newtype.sourcerange.min and newtype.sourcerange.max then
+ currenttype.sourcerange.min = newtype.sourcerange.min
+ currenttype.sourcerange.max = newtype.sourcerange.max
+ end
+ end
+ end
+ else
+ self:addtype(newtype)
+ end
+ end,
+
+ addglobalvar = function (self,item)
+ self.globalvars[item.name] = item
+ item.parent = self
+ end,
+
+ moduletyperef = function (self)
+ if self and self.returns[1] and self.returns[1].types[1] then
+ local typeref = self.returns[1].types[1]
+ return typeref
+ end
+ end
+ }
+ return file
+end
+
+function M._recordtypedef(name)
+ local recordtype = {
+ -- FIELDS
+ tag = "recordtypedef",
+ name = name, -- string (mandatory)
+ shortdescription = "", -- string
+ description = "", -- string
+ fields = {}, -- map from fieldname to field
+ sourcerange = {min=0,max=0},
+ supertype = nil, -- supertype of the type def (inheritance), should be a recordtypedef ref
+ defaultkeytyperef = nil, -- the default typeref of key
+ defaultvaluetyperef = nil, -- the default typeref of value
+ structurekind = nil, -- kind of structure of the type: could be nil, "map" or "list"
+ structuredescription = nil, -- description of the structure
+ call = nil, -- typeref to the function use as __call on the type.
+ -- FUNCTIONS
+ addfield = function (self,field)
+ self.fields[field.name] = field
+ field.parent = self
+ end,
+
+ getcalldef = function( self)
+ if self.call and self.call.tag == 'internaltyperef' then
+ if self.parent and self.parent.tag == 'file' then
+ local file = self.parent
+ return file.types[self.call.typename]
+ end
+ end
+ end
+ }
+ return recordtype
+end
+
+function M._functiontypedef(name)
+ return {
+ tag = "functiontypedef",
+ name = name, -- string (mandatory)
+ shortdescription = "", -- string
+ description = "", -- string
+ params = {}, -- list of parameter
+ returns = {} -- list of return
+ }
+end
+
+function M._parameter(name)
+ return {
+ tag = "parameter",
+ name = name, -- string (mandatory)
+ description = "", -- string
+ type = nil -- typeref (external or internal or primitive typeref)
+ }
+end
+
+function M._item(name)
+ return {
+ -- FIELDS
+ tag = "item",
+ name = name, -- string (mandatory)
+ shortdescription = "", -- string
+ description = "", -- string
+ type = nil, -- typeref (external or internal or primitive typeref)
+ occurrences = {}, -- list of identifier (see internalmodel)
+ sourcerange = {min=0, max=0},
+
+ -- This is A TRICK
+ -- This value is ALWAYS nil, except for internal purposes (short references).
+ external = nil,
+
+ -- FUNCTIONS
+ addoccurence = function (self,occ)
+ table.insert(self.occurrences,occ)
+ occ.definition = self
+ end,
+
+ resolvetype = function (self,file)
+ if self and self.type then
+ if self.type.tag =="internaltyperef" then
+ -- if file is not given try to retrieve it.
+ if not file then
+ if self.parent and self.parent.tag == 'recordtypedef' then
+ file = self.parent.parent
+ elseif self.parent.tag == 'file' then
+ file = self.parent
+ end
+ end
+ if file then return file.types[self.type.typename] end
+ elseif self.type.tag =="inlinetyperef" then
+ return self.type.def
+ end
+ end
+ end
+ }
+end
+
+function M._externaltypref(modulename, typename)
+ return {
+ tag = "externaltyperef",
+ modulename = modulename, -- string
+ typename = typename -- string
+ }
+end
+
+function M._internaltyperef(typename)
+ return {
+ tag = "internaltyperef",
+ typename = typename -- string
+ }
+end
+
+function M._primitivetyperef(typename)
+ return {
+ tag = "primitivetyperef",
+ typename = typename -- string
+ }
+end
+
+function M._moduletyperef(modulename,returnposition)
+ return {
+ tag = "moduletyperef",
+ modulename = modulename, -- string
+ returnposition = returnposition -- number
+ }
+end
+
+function M._exprtyperef(expression,returnposition)
+ return {
+ tag = "exprtyperef",
+ expression = expression, -- expression (see internal model)
+ returnposition = returnposition -- number
+ }
+end
+
+function M._inlinetyperef(definition)
+ return {
+ tag = "inlinetyperef",
+ def = definition, -- expression (see internal model)
+
+ }
+end
+
+function M._return(description)
+ return {
+ tag = "return",
+ description = description or "", -- string
+ types = {} -- list of typref (external or internal or primitive typeref)
+ }
+end
+return M
diff --git a/libraries/modelsbuilder/models/apimodelbuilder.lua b/libraries/modelsbuilder/models/apimodelbuilder.lua
index fcb83f7..a6cf489 100644
--- a/libraries/modelsbuilder/models/apimodelbuilder.lua
+++ b/libraries/modelsbuilder/models/apimodelbuilder.lua
@@ -1,545 +1,545 @@
---------------------------------------------------------------------------------
--- Copyright (c) 2011-2012 Sierra Wireless.
--- All rights reserved. This program and the accompanying materials
--- are made available under the terms of the Eclipse Public License v1.0
--- which accompanies this distribution, and is available at
--- http://www.eclipse.org/legal/epl-v10.html
---
--- Contributors:
--- Simon BERNARD <sbernard@sierrawireless.com>
--- - initial API and implementation and initial documentation
---------------------------------------------------------------------------------
-local apimodel = require "models.apimodel"
-local ldp = require "models.ldparser"
-local Q = require "metalua.treequery"
-
-local M = {}
-
-local handledcomments={} -- cache to know the comment already handled
-
-----
--- UTILITY METHODS
-local primitivetypes = {
- ['boolean'] = true,
- ['function'] = true,
- ['nil'] = true,
- ['number'] = true,
- ['string'] = true,
- ['thread'] = true,
- ['userdata'] = true,
- ['list'] = true,
- ['map'] = true,
- ['any'] = true
-
-}
-
--- get or create the typedef with the name "name"
-local function gettypedef(_file,name,kind,sourcerangemin,sourcerangemax)
- local kind = kind or "recordtypedef"
- local _typedef = _file.types[name]
- if _typedef then
- if _typedef.tag == kind then return _typedef end
- else
- if kind == "recordtypedef" and name ~= "global" then
- local _recordtypedef = apimodel._recordtypedef(name)
-
- -- define sourcerange
- _recordtypedef.sourcerange.min = sourcerangemin
- _recordtypedef.sourcerange.max = sourcerangemax
-
- -- add to file if a name is defined
- if _recordtypedef.name then _file:addtype(_recordtypedef) end
- return _recordtypedef
- elseif kind == "functiontypedef" then
- -- TODO support function
- return nil
- else
- return nil
- end
- end
- return nil
-end
-
-
--- create a typeref from the typref doc_tag
-local function createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
- local _typeref
- if dt_typeref.tag == "typeref" then
- if dt_typeref.module then
- -- manage external type
- _typeref = apimodel._externaltypref()
- _typeref.modulename = dt_typeref.module
- _typeref.typename = dt_typeref.type
- else
- if dt_typeref.type == "table" then
- -- manage special table type
- _typeref = apimodel._inlinetyperef(apimodel._recordtypedef("table"))
- elseif dt_typeref.type == "list" or dt_typeref.type == "map" then
- -- manage structures
- local structuretypedef = apimodel._recordtypedef(dt_typeref)
- structuretypedef.defaultvaluetyperef = createtyperef(dt_typeref.valuetype)
- if dt_typeref.type == "map" then
- structuretypedef.defaultkeytyperef = createtyperef(dt_typeref.keytype)
- end
- structuretypedef.structurekind = dt_typeref.type
- structuretypedef.name = dt_typeref.type
- _typeref = apimodel._inlinetyperef(structuretypedef)
- elseif primitivetypes[dt_typeref.type] then
- -- manage primitive types
- _typeref = apimodel._primitivetyperef()
- _typeref.typename = dt_typeref.type
- else
- -- manage internal type
- _typeref = apimodel._internaltyperef()
- _typeref.typename = dt_typeref.type
- if _file then
- gettypedef(_file, _typeref.typename, "recordtypedef", sourcerangemin,sourcerangemax)
- end
- end
- end
- end
- return _typeref
-end
-
--- create a return from the return doc_tag
-local function createreturn(dt_return,_file,sourcerangemin,sourcerangemax)
- local _return = apimodel._return()
-
- _return.description = dt_return.description
-
- -- manage typeref
- if dt_return.types then
- for _, dt_typeref in ipairs(dt_return.types) do
- local _typeref = createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
- if _typeref then
- table.insert(_return.types,_typeref)
- end
- end
- end
- return _return
-end
-
--- create a item from the field doc_tag
-local function createfield(dt_field,_file,sourcerangemin,sourcerangemax)
- local _item = apimodel._item(dt_field.name)
-
- if dt_field.shortdescription then
- _item.shortdescription = dt_field.shortdescription
- _item.description = dt_field.description
- else
- _item.shortdescription = dt_field.description
- end
-
- -- manage typeref
- local dt_typeref = dt_field.type
- if dt_typeref then
- _item.type = createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
- end
- return _item
-end
-
--- create a param from the param doc_tag
-local function createparam(dt_param,_file,sourcerangemin,sourcerangemax)
- if not dt_param.name then return nil end
-
- local _parameter = apimodel._parameter(dt_param.name)
- _parameter.description = dt_param.description
-
- -- manage typeref
- local dt_typeref = dt_param.type
- if dt_typeref then
- _parameter.type = createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
- end
- return _parameter
-end
-
--- get or create the typedef with the name "name"
-function M.additemtoparent(_file,_item,scope,sourcerangemin,sourcerangemax)
- if scope and not scope.module then
- if _item.name then
- if scope.type == "global" then
- _file:addglobalvar(_item)
- else
- local _recordtypedef = gettypedef (_file, scope.type ,"recordtypedef",sourcerangemin,sourcerangemax)
- _recordtypedef:addfield(_item)
- end
- else
- -- if no item name precise we store the scope in the item to be able to add it to the right parent later
- _item.scope = scope
- end
- end
-end
-
--- Function type counter
-local i = 0
-
--- Reset function type counter
-local function resetfunctiontypeidgenerator()
- i = 0
-end
-
--- Provides an unique index for a function type
-local function generatefunctiontypeid()
- i = i + 1
- return i
-end
-
--- generate a function type name
-local function generatefunctiontypename(_functiontypedef)
- local name = {"__"}
- if _functiontypedef.returns and _functiontypedef.returns[1] then
- local ret = _functiontypedef.returns[1]
- for _, type in ipairs(ret.types) do
- if type.typename then
- if type.modulename then
- table.insert(name,type.modulename)
- end
- table.insert(name,"#")
- table.insert(name,type.typename)
- end
- end
-
- end
- table.insert(name,"=")
- if _functiontypedef.params then
- for _, param in ipairs(_functiontypedef.params) do
- local type = param.type
- if type then
- if type.typename then
- if type.modulename then
- table.insert(name,type.modulename)
- end
- table.insert(name,"#")
- table.insert(name,type.typename)
- else
- table.insert(name,"#unknown")
- end
- end
- table.insert(name,"[")
- table.insert(name,param.name)
- table.insert(name,"]")
- end
- end
- table.insert(name,"__")
- table.insert(name, generatefunctiontypeid())
- return table.concat(name)
-end
-
---
--- Store user defined tags
---
-local function attachmetadata(apiobj, parsedcomment)
- local thirdtags = parsedcomment and parsedcomment.unknowntags
- if thirdtags then
- -- Define a storage index for user defined tags on current API element
- if not apiobj.metadata then apiobj.metadata = {} end
-
- -- Loop over user defined tags
- for usertag, taglist in pairs(thirdtags) do
- if not apiobj.metadata[ usertag ] then
- apiobj.metadata[ usertag ] = {
- tag = usertag
- }
- end
- for _, tag in ipairs( taglist ) do
- table.insert(apiobj.metadata[usertag], tag)
- end
- end
- end
-end
-
-
-------------------------------------------------------
--- create the module api
-function M.createmoduleapi(ast,modulename)
-
- -- Initialise function type naming
- resetfunctiontypeidgenerator()
-
- local _file = apimodel._file()
-
- local _comment2apiobj = {}
-
- local function handlecomment(comment)
-
- -- Extract information from tagged comments
- local parsedcomment = ldp.parse(comment[1])
- if not parsedcomment then return nil end
-
- -- Get tags from the languages
- local regulartags = parsedcomment.tags
-
- -- Will contain last API object generated from comments
- local _lastapiobject
-
- -- if comment is an ld comment
- if regulartags then
- -- manage "module" comment
- if regulartags["module"] then
- -- get name
- _file.name = regulartags["module"][1].name or modulename
- _lastapiobject = _file
-
- -- manage descriptions
- _file.shortdescription = parsedcomment.shortdescription
- _file.description = parsedcomment.description
-
- local sourcerangemin = comment.lineinfo.first.offset
- local sourcerangemax = comment.lineinfo.last.offset
-
- -- manage returns
- if regulartags ["return"] then
- for _, dt_return in ipairs(regulartags ["return"]) do
- local _return = createreturn(dt_return,_file,sourcerangemin,sourcerangemax)
- table.insert(_file.returns,_return)
- end
- end
- -- if no returns on module create a defaultreturn of type #modulename
- if #_file.returns == 0 and _file.name then
- -- create internal type ref
- local _typeref = apimodel._internaltyperef()
- _typeref.typename = _file.name
-
- -- create return
- local _return = apimodel._return()
- table.insert(_return.types,_typeref)
-
- -- add return
- table.insert(_file.returns,_return)
-
- --create recordtypedef is not define
- local _moduletypedef = gettypedef(_file,_typeref.typename,"recordtypedef",sourcerangemin,sourcerangemax)
-
- -- manage extends (inheritance) and structure tags
- if _moduletypedef and _moduletypedef.tag == "recordtypedef" then
- if regulartags["extends"] and regulartags["extends"][1] then
- local supertype = regulartags["extends"][1].type
- if supertype then _moduletypedef.supertype = createtyperef(supertype) end
- end
- if regulartags["map"] and regulartags["map"][1] then
- local keytype = regulartags["map"][1].keytype
- local valuetype = regulartags["map"][1].valuetype
- if keytype and valuetype then
- _moduletypedef.defaultkeytyperef = createtyperef(keytype)
- _moduletypedef.defaultvaluetyperef = createtyperef(valuetype)
- _moduletypedef.structurekind = "map"
- _moduletypedef.structuredescription = regulartags["map"][1].description
- end
- elseif regulartags["list"] and regulartags["list"][1] then
- local type = regulartags["list"][1].type
- if type then
- _moduletypedef.defaultvaluetyperef = createtyperef(type)
- _moduletypedef.structurekind = "list"
- _moduletypedef.structuredescription = regulartags["list"][1].description
- end
- end
- end
- end
- -- manage "type" comment
- elseif regulartags["type"] and regulartags["type"][1].name ~= "global" then
- local dt_type = regulartags["type"][1];
- -- create record type if it doesn't exist
- local sourcerangemin = comment.lineinfo.first.offset
- local sourcerangemax = comment.lineinfo.last.offset
- local _recordtypedef = gettypedef (_file, dt_type.name ,"recordtypedef",sourcerangemin,sourcerangemax)
- _lastapiobject = _recordtypedef
-
- -- re-set sourcerange in case the type was created before the type tag
- _recordtypedef.sourcerange.min = sourcerangemin
- _recordtypedef.sourcerange.max = sourcerangemax
-
- -- manage description
- _recordtypedef.shortdescription = parsedcomment.shortdescription
- _recordtypedef.description = parsedcomment.description
-
- -- manage fields
- if regulartags["field"] then
- for _, dt_field in ipairs(regulartags["field"]) do
- local _item = createfield(dt_field,_file,sourcerangemin,sourcerangemax)
- -- define sourcerange only if we create it
- _item.sourcerange.min = sourcerangemin
- _item.sourcerange.max = sourcerangemax
- if _item and _item.name then
- _recordtypedef:addfield(_item) end
- end
- end
-
- -- manage extends (inheritance)
- if regulartags["extends"] and regulartags["extends"][1] then
- local supertype = regulartags["extends"][1].type
- if supertype then _recordtypedef.supertype = createtyperef(supertype) end
- end
-
- -- manage structure tag
- if regulartags["map"] and regulartags["map"][1] then
- local keytype = regulartags["map"][1].keytype
- local valuetype = regulartags["map"][1].valuetype
- if keytype and valuetype then
- _recordtypedef.defaultkeytyperef = createtyperef(keytype)
- _recordtypedef.defaultvaluetyperef = createtyperef(valuetype)
- _recordtypedef.structurekind = "map"
- _recordtypedef.structuredescription = regulartags["map"][1].description
- end
- elseif regulartags["list"] and regulartags["list"][1] then
- local type = regulartags["list"][1].type
- if type then
- _recordtypedef.defaultvaluetyperef = createtyperef(type)
- _recordtypedef.structurekind = "list"
- _recordtypedef.structuredescription = regulartags["list"][1].description
- end
- end
- elseif regulartags["field"] then
- local dt_field = regulartags["field"][1]
-
- -- create item
- local sourcerangemin = comment.lineinfo.first.offset
- local sourcerangemax = comment.lineinfo.last.offset
- local _item = createfield(dt_field,_file,sourcerangemin,sourcerangemax)
- _item.shortdescription = parsedcomment.shortdescription
- _item.description = parsedcomment.description
- _lastapiobject = _item
-
- -- define sourcerange
- _item.sourcerange.min = sourcerangemin
- _item.sourcerange.max = sourcerangemax
-
- -- add item to its parent
- local scope = regulartags["field"][1].parent
- M.additemtoparent(_file,_item,scope,sourcerangemin,sourcerangemax)
- elseif regulartags["function"] or regulartags["param"] or regulartags["return"] or regulartags["callof"] then
- -- create item
- local _item = apimodel._item()
- _item.shortdescription = parsedcomment.shortdescription
- _item.description = parsedcomment.description
- _lastapiobject = _item
-
- -- set name
- if regulartags["function"] then _item.name = regulartags["function"][1].name end
-
- -- define sourcerange
- local sourcerangemin = comment.lineinfo.first.offset
- local sourcerangemax = comment.lineinfo.last.offset
- _item.sourcerange.min = sourcerangemin
- _item.sourcerange.max = sourcerangemax
-
-
- -- create function type
- local _functiontypedef = apimodel._functiontypedef()
- _functiontypedef.shortdescription = parsedcomment.shortdescription
- _functiontypedef.description = parsedcomment.description
-
-
- -- manage params
- if regulartags["param"] then
- for _, dt_param in ipairs(regulartags["param"]) do
- local _param = createparam(dt_param,_file,sourcerangemin,sourcerangemax)
- table.insert(_functiontypedef.params,_param)
- end
- end
-
- -- manage returns
- if regulartags["return"] then
- for _, dt_return in ipairs(regulartags["return"]) do
- local _return = createreturn(dt_return,_file,sourcerangemin,sourcerangemax)
- table.insert(_functiontypedef.returns,_return)
- end
- end
-
- -- add type name
- _functiontypedef.name = generatefunctiontypename(_functiontypedef)
- attachmetadata(_functiontypedef, parsedcomment)
- _file:addtype(_functiontypedef)
-
- -- create ref to this type
- local _internaltyperef = apimodel._internaltyperef()
- _internaltyperef.typename = _functiontypedef.name
- _item.type=_internaltyperef
-
- -- add item to its parent
- local sourcerangemin = comment.lineinfo.first.offset
- local sourcerangemax = comment.lineinfo.last.offset
- local scope = (regulartags["function"] and regulartags["function"][1].parent) or nil
- M.additemtoparent(_file,_item,scope,sourcerangemin,sourcerangemax)
-
- -- manage callof
- if regulartags["callof"] and regulartags["callof"][1] and regulartags["callof"][1].type then
- -- get the type which will be callable !
- local _internaltyperef = createtyperef(regulartags["callof"][1].type)
- if _internaltyperef and _internaltyperef.tag == "internaltyperef" then
- local _typedeftypedef = gettypedef(_file,_internaltyperef.typename,"recordtypedef",sourcerangemin,sourcerangemax)
- if _typedeftypedef then
- -- refer the function used when the type is called
- local _internaltyperef = apimodel._internaltyperef()
- _internaltyperef.typename = _functiontypedef.name
- _typedeftypedef.call =_internaltyperef
- end
- end
- end
- end
- end
-
- -- when we could not know which type of api object it is, we suppose this is an item
- if not _lastapiobject then
- _lastapiobject = apimodel._item()
- _lastapiobject.shortdescription = parsedcomment.shortdescription
- _lastapiobject.description = parsedcomment.description
- _lastapiobject.sourcerange.min = comment.lineinfo.first.offset
- _lastapiobject.sourcerange.max = comment.lineinfo.last.offset
- end
-
- attachmetadata(_lastapiobject, parsedcomment)
-
- -- if we create an api object linked it to
- _comment2apiobj[comment] =_lastapiobject
- end
-
- local function parsecomment(node, parent, ...)
- -- check for comments before this node
- if node.lineinfo and node.lineinfo.first.comments then
- local comments = node.lineinfo.first.comments
- -- check all comments
- for _,comment in ipairs(comments) do
- -- if not already handled
- if not handledcomments[comment] then
- handlecomment(comment)
- handledcomments[comment]=true
- end
- end
- end
- -- check for comments after this node
- if node.lineinfo and node.lineinfo.last.comments then
- local comments = node.lineinfo.last.comments
- -- check all comments
- for _,comment in ipairs(comments) do
- -- if not already handled
- if not handledcomments[comment] then
- handlecomment(comment)
- handledcomments[comment]=true
- end
- end
- end
- end
- Q(ast):filter(function(x) return x.tag~=nil end):foreach(parsecomment)
- return _file, _comment2apiobj
-end
-
-
-function M.extractlocaltype ( commentblock,_file)
- if not commentblock then return nil end
-
- local stringcomment = commentblock[1]
-
- local parsedtag = ldp.parseinlinecomment(stringcomment)
- if parsedtag then
- local sourcerangemin = commentblock.lineinfo.first.offset
- local sourcerangemax = commentblock.lineinfo.last.offset
-
- return createtyperef(parsedtag,_file,sourcerangemin,sourcerangemax), parsedtag.description
- end
-
- return nil, stringcomment
-end
-
-M.generatefunctiontypename = generatefunctiontypename
-
-return M
+--------------------------------------------------------------------------------
+-- Copyright (c) 2011-2012 Sierra Wireless.
+-- All rights reserved. This program and the accompanying materials
+-- are made available under the terms of the Eclipse Public License v1.0
+-- which accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- Contributors:
+-- Simon BERNARD <sbernard@sierrawireless.com>
+-- - initial API and implementation and initial documentation
+--------------------------------------------------------------------------------
+local apimodel = require "models.apimodel"
+local ldp = require "models.ldparser"
+local Q = require "metalua.treequery"
+
+local M = {}
+
+local handledcomments={} -- cache to know the comment already handled
+
+----
+-- UTILITY METHODS
+local primitivetypes = {
+ ['boolean'] = true,
+ ['function'] = true,
+ ['nil'] = true,
+ ['number'] = true,
+ ['string'] = true,
+ ['thread'] = true,
+ ['userdata'] = true,
+ ['list'] = true,
+ ['map'] = true,
+ ['any'] = true
+
+}
+
+-- get or create the typedef with the name "name"
+local function gettypedef(_file,name,kind,sourcerangemin,sourcerangemax)
+ local kind = kind or "recordtypedef"
+ local _typedef = _file.types[name]
+ if _typedef then
+ if _typedef.tag == kind then return _typedef end
+ else
+ if kind == "recordtypedef" and name ~= "global" then
+ local _recordtypedef = apimodel._recordtypedef(name)
+
+ -- define sourcerange
+ _recordtypedef.sourcerange.min = sourcerangemin
+ _recordtypedef.sourcerange.max = sourcerangemax
+
+ -- add to file if a name is defined
+ if _recordtypedef.name then _file:addtype(_recordtypedef) end
+ return _recordtypedef
+ elseif kind == "functiontypedef" then
+ -- TODO support function
+ return nil
+ else
+ return nil
+ end
+ end
+ return nil
+end
+
+
+-- create a typeref from the typref doc_tag
+local function createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
+ local _typeref
+ if dt_typeref.tag == "typeref" then
+ if dt_typeref.module then
+ -- manage external type
+ _typeref = apimodel._externaltypref()
+ _typeref.modulename = dt_typeref.module
+ _typeref.typename = dt_typeref.type
+ else
+ if dt_typeref.type == "table" then
+ -- manage special table type
+ _typeref = apimodel._inlinetyperef(apimodel._recordtypedef("table"))
+ elseif dt_typeref.type == "list" or dt_typeref.type == "map" then
+ -- manage structures
+ local structuretypedef = apimodel._recordtypedef(dt_typeref)
+ structuretypedef.defaultvaluetyperef = createtyperef(dt_typeref.valuetype)
+ if dt_typeref.type == "map" then
+ structuretypedef.defaultkeytyperef = createtyperef(dt_typeref.keytype)
+ end
+ structuretypedef.structurekind = dt_typeref.type
+ structuretypedef.name = dt_typeref.type
+ _typeref = apimodel._inlinetyperef(structuretypedef)
+ elseif primitivetypes[dt_typeref.type] then
+ -- manage primitive types
+ _typeref = apimodel._primitivetyperef()
+ _typeref.typename = dt_typeref.type
+ else
+ -- manage internal type
+ _typeref = apimodel._internaltyperef()
+ _typeref.typename = dt_typeref.type
+ if _file then
+ gettypedef(_file, _typeref.typename, "recordtypedef", sourcerangemin,sourcerangemax)
+ end
+ end
+ end
+ end
+ return _typeref
+end
+
+-- create a return from the return doc_tag
+local function createreturn(dt_return,_file,sourcerangemin,sourcerangemax)
+ local _return = apimodel._return()
+
+ _return.description = dt_return.description
+
+ -- manage typeref
+ if dt_return.types then
+ for _, dt_typeref in ipairs(dt_return.types) do
+ local _typeref = createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
+ if _typeref then
+ table.insert(_return.types,_typeref)
+ end
+ end
+ end
+ return _return
+end
+
+-- create a item from the field doc_tag
+local function createfield(dt_field,_file,sourcerangemin,sourcerangemax)
+ local _item = apimodel._item(dt_field.name)
+
+ if dt_field.shortdescription then
+ _item.shortdescription = dt_field.shortdescription
+ _item.description = dt_field.description
+ else
+ _item.shortdescription = dt_field.description
+ end
+
+ -- manage typeref
+ local dt_typeref = dt_field.type
+ if dt_typeref then
+ _item.type = createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
+ end
+ return _item
+end
+
+-- create a param from the param doc_tag
+local function createparam(dt_param,_file,sourcerangemin,sourcerangemax)
+ if not dt_param.name then return nil end
+
+ local _parameter = apimodel._parameter(dt_param.name)
+ _parameter.description = dt_param.description
+
+ -- manage typeref
+ local dt_typeref = dt_param.type
+ if dt_typeref then
+ _parameter.type = createtyperef(dt_typeref,_file,sourcerangemin,sourcerangemax)
+ end
+ return _parameter
+end
+
+-- get or create the typedef with the name "name"
+function M.additemtoparent(_file,_item,scope,sourcerangemin,sourcerangemax)
+ if scope and not scope.module then
+ if _item.name then
+ if scope.type == "global" then
+ _file:addglobalvar(_item)
+ else
+ local _recordtypedef = gettypedef (_file, scope.type ,"recordtypedef",sourcerangemin,sourcerangemax)
+ _recordtypedef:addfield(_item)
+ end
+ else
+ -- if no item name precise we store the scope in the item to be able to add it to the right parent later
+ _item.scope = scope
+ end
+ end
+end
+
+-- Function type counter
+local i = 0
+
+-- Reset function type counter
+local function resetfunctiontypeidgenerator()
+ i = 0
+end
+
+-- Provides an unique index for a function type
+local function generatefunctiontypeid()
+ i = i + 1
+ return i
+end
+
+-- generate a function type name
+local function generatefunctiontypename(_functiontypedef)
+ local name = {"__"}
+ if _functiontypedef.returns and _functiontypedef.returns[1] then
+ local ret = _functiontypedef.returns[1]
+ for _, type in ipairs(ret.types) do
+ if type.typename then
+ if type.modulename then
+ table.insert(name,type.modulename)
+ end
+ table.insert(name,"#")
+ table.insert(name,type.typename)
+ end
+ end
+
+ end
+ table.insert(name,"=")
+ if _functiontypedef.params then
+ for _, param in ipairs(_functiontypedef.params) do
+ local type = param.type
+ if type then
+ if type.typename then
+ if type.modulename then
+ table.insert(name,type.modulename)
+ end
+ table.insert(name,"#")
+ table.insert(name,type.typename)
+ else
+ table.insert(name,"#unknown")
+ end
+ end
+ table.insert(name,"[")
+ table.insert(name,param.name)
+ table.insert(name,"]")
+ end
+ end
+ table.insert(name,"__")
+ table.insert(name, generatefunctiontypeid())
+ return table.concat(name)
+end
+
+--
+-- Store user defined tags
+--
+local function attachmetadata(apiobj, parsedcomment)
+ local thirdtags = parsedcomment and parsedcomment.unknowntags
+ if thirdtags then
+ -- Define a storage index for user defined tags on current API element
+ if not apiobj.metadata then apiobj.metadata = {} end
+
+ -- Loop over user defined tags
+ for usertag, taglist in pairs(thirdtags) do
+ if not apiobj.metadata[ usertag ] then
+ apiobj.metadata[ usertag ] = {
+ tag = usertag
+ }
+ end
+ for _, tag in ipairs( taglist ) do
+ table.insert(apiobj.metadata[usertag], tag)
+ end
+ end
+ end
+end
+
+
+------------------------------------------------------
+-- create the module api
+function M.createmoduleapi(ast,modulename)
+
+ -- Initialise function type naming
+ resetfunctiontypeidgenerator()
+
+ local _file = apimodel._file()
+
+ local _comment2apiobj = {}
+
+ local function handlecomment(comment)
+
+ -- Extract information from tagged comments
+ local parsedcomment = ldp.parse(comment[1])
+ if not parsedcomment then return nil end
+
+ -- Get tags from the languages
+ local regulartags = parsedcomment.tags
+
+ -- Will contain last API object generated from comments
+ local _lastapiobject
+
+ -- if comment is an ld comment
+ if regulartags then
+ -- manage "module" comment
+ if regulartags["module"] then
+ -- get name
+ _file.name = regulartags["module"][1].name or modulename
+ _lastapiobject = _file
+
+ -- manage descriptions
+ _file.shortdescription = parsedcomment.shortdescription
+ _file.description = parsedcomment.description
+
+ local sourcerangemin = comment.lineinfo.first.offset
+ local sourcerangemax = comment.lineinfo.last.offset
+
+ -- manage returns
+ if regulartags ["return"] then
+ for _, dt_return in ipairs(regulartags ["return"]) do
+ local _return = createreturn(dt_return,_file,sourcerangemin,sourcerangemax)
+ table.insert(_file.returns,_return)
+ end
+ end
+ -- if no returns on module create a defaultreturn of type #modulename
+ if #_file.returns == 0 and _file.name then
+ -- create internal type ref
+ local _typeref = apimodel._internaltyperef()
+ _typeref.typename = _file.name
+
+ -- create return
+ local _return = apimodel._return()
+ table.insert(_return.types,_typeref)
+
+ -- add return
+ table.insert(_file.returns,_return)
+
+ --create recordtypedef is not define
+ local _moduletypedef = gettypedef(_file,_typeref.typename,"recordtypedef",sourcerangemin,sourcerangemax)
+
+ -- manage extends (inheritance) and structure tags
+ if _moduletypedef and _moduletypedef.tag == "recordtypedef" then
+ if regulartags["extends"] and regulartags["extends"][1] then
+ local supertype = regulartags["extends"][1].type
+ if supertype then _moduletypedef.supertype = createtyperef(supertype) end
+ end
+ if regulartags["map"] and regulartags["map"][1] then
+ local keytype = regulartags["map"][1].keytype
+ local valuetype = regulartags["map"][1].valuetype
+ if keytype and valuetype then
+ _moduletypedef.defaultkeytyperef = createtyperef(keytype)
+ _moduletypedef.defaultvaluetyperef = createtyperef(valuetype)
+ _moduletypedef.structurekind = "map"
+ _moduletypedef.structuredescription = regulartags["map"][1].description
+ end
+ elseif regulartags["list"] and regulartags["list"][1] then
+ local type = regulartags["list"][1].type
+ if type then
+ _moduletypedef.defaultvaluetyperef = createtyperef(type)
+ _moduletypedef.structurekind = "list"
+ _moduletypedef.structuredescription = regulartags["list"][1].description
+ end
+ end
+ end
+ end
+ -- manage "type" comment
+ elseif regulartags["type"] and regulartags["type"][1].name ~= "global" then
+ local dt_type = regulartags["type"][1];
+ -- create record type if it doesn't exist
+ local sourcerangemin = comment.lineinfo.first.offset
+ local sourcerangemax = comment.lineinfo.last.offset
+ local _recordtypedef = gettypedef (_file, dt_type.name ,"recordtypedef",sourcerangemin,sourcerangemax)
+ _lastapiobject = _recordtypedef
+
+ -- re-set sourcerange in case the type was created before the type tag
+ _recordtypedef.sourcerange.min = sourcerangemin
+ _recordtypedef.sourcerange.max = sourcerangemax
+
+ -- manage description
+ _recordtypedef.shortdescription = parsedcomment.shortdescription
+ _recordtypedef.description = parsedcomment.description
+
+ -- manage fields
+ if regulartags["field"] then
+ for _, dt_field in ipairs(regulartags["field"]) do
+ local _item = createfield(dt_field,_file,sourcerangemin,sourcerangemax)
+ -- define sourcerange only if we create it
+ _item.sourcerange.min = sourcerangemin
+ _item.sourcerange.max = sourcerangemax
+ if _item and _item.name then
+ _recordtypedef:addfield(_item) end
+ end
+ end
+
+ -- manage extends (inheritance)
+ if regulartags["extends"] and regulartags["extends"][1] then
+ local supertype = regulartags["extends"][1].type
+ if supertype then _recordtypedef.supertype = createtyperef(supertype) end
+ end
+
+ -- manage structure tag
+ if regulartags["map"] and regulartags["map"][1] then
+ local keytype = regulartags["map"][1].keytype
+ local valuetype = regulartags["map"][1].valuetype
+ if keytype and valuetype then
+ _recordtypedef.defaultkeytyperef = createtyperef(keytype)
+ _recordtypedef.defaultvaluetyperef = createtyperef(valuetype)
+ _recordtypedef.structurekind = "map"
+ _recordtypedef.structuredescription = regulartags["map"][1].description
+ end
+ elseif regulartags["list"] and regulartags["list"][1] then
+ local type = regulartags["list"][1].type
+ if type then
+ _recordtypedef.defaultvaluetyperef = createtyperef(type)
+ _recordtypedef.structurekind = "list"
+ _recordtypedef.structuredescription = regulartags["list"][1].description
+ end
+ end
+ elseif regulartags["field"] then
+ local dt_field = regulartags["field"][1]
+
+ -- create item
+ local sourcerangemin = comment.lineinfo.first.offset
+ local sourcerangemax = comment.lineinfo.last.offset
+ local _item = createfield(dt_field,_file,sourcerangemin,sourcerangemax)
+ _item.shortdescription = parsedcomment.shortdescription
+ _item.description = parsedcomment.description
+ _lastapiobject = _item
+
+ -- define sourcerange
+ _item.sourcerange.min = sourcerangemin
+ _item.sourcerange.max = sourcerangemax
+
+ -- add item to its parent
+ local scope = regulartags["field"][1].parent
+ M.additemtoparent(_file,_item,scope,sourcerangemin,sourcerangemax)
+ elseif regulartags["function"] or regulartags["param"] or regulartags["return"] or regulartags["callof"] then
+ -- create item
+ local _item = apimodel._item()
+ _item.shortdescription = parsedcomment.shortdescription
+ _item.description = parsedcomment.description
+ _lastapiobject = _item
+
+ -- set name
+ if regulartags["function"] then _item.name = regulartags["function"][1].name end
+
+ -- define sourcerange
+ local sourcerangemin = comment.lineinfo.first.offset
+ local sourcerangemax = comment.lineinfo.last.offset
+ _item.sourcerange.min = sourcerangemin
+ _item.sourcerange.max = sourcerangemax
+
+
+ -- create function type
+ local _functiontypedef = apimodel._functiontypedef()
+ _functiontypedef.shortdescription = parsedcomment.shortdescription
+ _functiontypedef.description = parsedcomment.description
+
+
+ -- manage params
+ if regulartags["param"] then
+ for _, dt_param in ipairs(regulartags["param"]) do
+ local _param = createparam(dt_param,_file,sourcerangemin,sourcerangemax)
+ table.insert(_functiontypedef.params,_param)
+ end
+ end
+
+ -- manage returns
+ if regulartags["return"] then
+ for _, dt_return in ipairs(regulartags["return"]) do
+ local _return = createreturn(dt_return,_file,sourcerangemin,sourcerangemax)
+ table.insert(_functiontypedef.returns,_return)
+ end
+ end
+
+ -- add type name
+ _functiontypedef.name = generatefunctiontypename(_functiontypedef)
+ attachmetadata(_functiontypedef, parsedcomment)
+ _file:addtype(_functiontypedef)
+
+ -- create ref to this type
+ local _internaltyperef = apimodel._internaltyperef()
+ _internaltyperef.typename = _functiontypedef.name
+ _item.type=_internaltyperef
+
+ -- add item to its parent
+ local sourcerangemin = comment.lineinfo.first.offset
+ local sourcerangemax = comment.lineinfo.last.offset
+ local scope = (regulartags["function"] and regulartags["function"][1].parent) or nil
+ M.additemtoparent(_file,_item,scope,sourcerangemin,sourcerangemax)
+
+ -- manage callof
+ if regulartags["callof"] and regulartags["callof"][1] and regulartags["callof"][1].type then
+ -- get the type which will be callable !
+ local _internaltyperef = createtyperef(regulartags["callof"][1].type)
+ if _internaltyperef and _internaltyperef.tag == "internaltyperef" then
+ local _typedeftypedef = gettypedef(_file,_internaltyperef.typename,"recordtypedef",sourcerangemin,sourcerangemax)
+ if _typedeftypedef then
+ -- refer the function used when the type is called
+ local _internaltyperef = apimodel._internaltyperef()
+ _internaltyperef.typename = _functiontypedef.name
+ _typedeftypedef.call =_internaltyperef
+ end
+ end
+ end
+ end
+ end
+
+ -- when we could not know which type of api object it is, we suppose this is an item
+ if not _lastapiobject then
+ _lastapiobject = apimodel._item()
+ _lastapiobject.shortdescription = parsedcomment.shortdescription
+ _lastapiobject.description = parsedcomment.description
+ _lastapiobject.sourcerange.min = comment.lineinfo.first.offset
+ _lastapiobject.sourcerange.max = comment.lineinfo.last.offset
+ end
+
+ attachmetadata(_lastapiobject, parsedcomment)
+
+ -- if we create an api object linked it to
+ _comment2apiobj[comment] =_lastapiobject
+ end
+
+ local function parsecomment(node, parent, ...)
+ -- check for comments before this node
+ if node.lineinfo and node.lineinfo.first.comments then
+ local comments = node.lineinfo.first.comments
+ -- check all comments
+ for _,comment in ipairs(comments) do
+ -- if not already handled
+ if not handledcomments[comment] then
+ handlecomment(comment)
+ handledcomments[comment]=true
+ end
+ end
+ end
+ -- check for comments after this node
+ if node.lineinfo and node.lineinfo.last.comments then
+ local comments = node.lineinfo.last.comments
+ -- check all comments
+ for _,comment in ipairs(comments) do
+ -- if not already handled
+ if not handledcomments[comment] then
+ handlecomment(comment)
+ handledcomments[comment]=true
+ end
+ end
+ end
+ end
+ Q(ast):filter(function(x) return x.tag~=nil end):foreach(parsecomment)
+ return _file, _comment2apiobj
+end
+
+
+function M.extractlocaltype ( commentblock,_file)
+ if not commentblock then return nil end
+
+ local stringcomment = commentblock[1]
+
+ local parsedtag = ldp.parseinlinecomment(stringcomment)
+ if parsedtag then
+ local sourcerangemin = commentblock.lineinfo.first.offset
+ local sourcerangemax = commentblock.lineinfo.last.offset
+
+ return createtyperef(parsedtag,_file,sourcerangemin,sourcerangemax), parsedtag.description
+ end
+
+ return nil, stringcomment
+end
+
+M.generatefunctiontypename = generatefunctiontypename
+
+return M
diff --git a/libraries/modelsbuilder/models/internalmodel.lua b/libraries/modelsbuilder/models/internalmodel.lua
index 047b15c..b0da390 100644
--- a/libraries/modelsbuilder/models/internalmodel.lua
+++ b/libraries/modelsbuilder/models/internalmodel.lua
@@ -12,54 +12,54 @@
local M = {}
function M._internalcontent()
- return {
- content = nil, -- block
- unknownglobalvars = {}, -- list of item
- tag = "MInternalContent"
- }
+ return {
+ content = nil, -- block
+ unknownglobalvars = {}, -- list of item
+ tag = "MInternalContent"
+ }
end
function M._block()
- return {
- content = {}, -- list of expr (identifier, index, call, invoke, block)
- localvars = {}, -- list of {var=item, scope ={min,max}}
- sourcerange = {min=0,max=0},
- tag = "MBlock"
- }
+ return {
+ content = {}, -- list of expr (identifier, index, call, invoke, block)
+ localvars = {}, -- list of {var=item, scope ={min,max}}
+ sourcerange = {min=0,max=0},
+ tag = "MBlock"
+ }
end
function M._identifier()
- return {
- definition = nil, -- item
- sourcerange = {min=0,max=0},
- tag = "MIdentifier"
- }
+ return {
+ definition = nil, -- item
+ sourcerange = {min=0,max=0},
+ tag = "MIdentifier"
+ }
end
function M._index(key, value)
- return {
- left= key, -- expr (identifier, index, call, invoke, block)
- right= value, -- string
- sourcerange = {min=0,max=0},
- tag = "MIndex"
- }
+ return {
+ left= key, -- expr (identifier, index, call, invoke, block)
+ right= value, -- string
+ sourcerange = {min=0,max=0},
+ tag = "MIndex"
+ }
end
function M._call(funct)
- return {
- func = funct, -- expr (identifier, index, call, invoke, block)
- sourcerange = {min=0,max=0},
- tag = "MCall"
- }
+ return {
+ func = funct, -- expr (identifier, index, call, invoke, block)
+ sourcerange = {min=0,max=0},
+ tag = "MCall"
+ }
end
function M._invoke(name, expr)
- return {
- functionname = name, -- string
- record = expr, -- expr (identifier, index, call, invoke, block)
- sourcerange = {min=0,max=0},
- tag = "MInvoke"
- }
+ return {
+ functionname = name, -- string
+ record = expr, -- expr (identifier, index, call, invoke, block)
+ sourcerange = {min=0,max=0},
+ tag = "MInvoke"
+ }
end
return M
diff --git a/libraries/modelsbuilder/models/internalmodelbuilder.mlua b/libraries/modelsbuilder/models/internalmodelbuilder.mlua
index 9c412ed..0184336 100644
--- a/libraries/modelsbuilder/models/internalmodelbuilder.mlua
+++ b/libraries/modelsbuilder/models/internalmodelbuilder.mlua
@@ -1,872 +1,872 @@
---------------------------------------------------------------------------------
--- Copyright (c) 2011-2012 Sierra Wireless.
--- All rights reserved. This program and the accompanying materials
--- are made available under the terms of the Eclipse Public License v1.0
--- which accompanies this distribution, and is available at
--- http://www.eclipse.org/legal/epl-v10.html
---
--- Contributors:
--- Simon BERNARD <sbernard@sierrawireless.com>
--- - initial API and implementation and initial documentation
---------------------------------------------------------------------------------
--{ extension ('match', ...) }
-
-local Q = require 'metalua.treequery'
-
-local internalmodel = require 'models.internalmodel'
-local apimodel = require 'models.apimodel'
-local apimodelbuilder = require 'models.apimodelbuilder'
-
-local M = {}
-
--- Analyzes an AST and returns two tables
--- * `locals`, which associates `Id{ } nodes which create a local variable
--- to a list of the `Id{ } occurrence nodes of that variable;
--- * `globals` which associates variable names to occurrences of
--- global variables having that name.
-function bindings(ast)
- local locals, globals = { }, { }
- local function f(id, ...)
- local name = id[1]
- if Q.is_binder(id, ...) then
- local binder = ... -- parent is the binder
- locals[binder] = locals[binder] or { }
- locals[binder][name]={ }
- else
- local _, binder = Q.get_binder(id, ...)
- if binder then -- this is a local
- table.insert(locals[binder][name], id)
- else
- local g = globals[name]
- if g then table.insert(g, id) else globals[name]={id} end
- end
- end
- end
- Q(ast) :filter('Id') :foreach(f)
- return locals, globals
-end
-
--- --------------------------------------
-
--- ----------------------------------------------------------
--- return the comment linked before to this node
--- ----------------------------------------------------------
-local function getlinkedcommentbefore(node)
- local function _getlinkedcomment(node,line)
- if node and node.lineinfo and node.lineinfo.first.line == line then
- -- get the last comment before (the nearest of code)
- local comments = node.lineinfo.first.comments
- local comment = comments and comments[#comments]
- if comment and comment.lineinfo.last.line == line-1 then
- -- ignore the comment if there are code before on the same line
- if node.lineinfo.first.facing and (node.lineinfo.first.facing.line ~= comment.lineinfo.first.line) then
- return comment
- end
- else
- return _getlinkedcomment(node.parent,line)
- end
- end
- return nil
- end
-
- if node.lineinfo and node.lineinfo.first.line then
- return _getlinkedcomment(node,node.lineinfo.first.line)
- else
- return nil
- end
-end
-
--- ----------------------------------------------------------
--- return the comment linked after to this node
--- ----------------------------------------------------------
-local function getlinkedcommentafter(node)
- local function _getlinkedcomment(node,line)
- if node and node.lineinfo and node.lineinfo.last.line == line then
- -- get the first comment after (the nearest of code)
- local comments = node.lineinfo.last.comments
- local comment = comments and comments[1]
- if comment and comment.lineinfo.first.line == line then
- return comment
- else
- return _getlinkedcomment(node.parent,line)
- end
- end
- return nil
- end
-
- if node.lineinfo and node.lineinfo.last.line then
- return _getlinkedcomment(node,node.lineinfo.last.line)
- else
- return nil
- end
-end
-
--- ----------------------------------------------------------
--- return true if this node is a block for the internal representation
--- ----------------------------------------------------------
-local supported_b = {
- Function = true,
- Do = true,
- While = true,
- Fornum = true,
- Forin = true,
- Repeat = true,
-}
-local function supportedblock(node, parent)
- return supported_b[ node.tag ] or
- (parent and parent.tag == "If" and node.tag == nil)
-end
-
--- ----------------------------------------------------------
--- create a block from the metalua node
--- ----------------------------------------------------------
-local function createblock(block, parent)
- local _block = internalmodel._block()
- match block with
- | `Function{param, body}
- | `Do{...}
- | `Fornum {identifier, min, max, body}
- | `Forin {identifiers, exprs, body}
- | `Repeat {body, expr} ->
- _block.sourcerange.min = block.lineinfo.first.offset
- _block.sourcerange.max = block.lineinfo.last.offset
- | `While {expr, body} ->
- _block.sourcerange.min = body.lineinfo.first.facing.offset
- _block.sourcerange.max = body.lineinfo.last.facing.offset
- | _ ->
- if parent and parent.tag == "If" and block.tag == nil then
- _block.sourcerange.min = block.lineinfo.first.facing.offset
- _block.sourcerange.max = block.lineinfo.last.facing.offset
- end
- end
- return _block
-end
-
--- ----------------------------------------------------------
--- return true if this node is a expression in the internal representation
--- ----------------------------------------------------------
-local supported_e = {
- Index = true,
- Id = true,
- Call = true,
- Invoke = true
-}
-local function supportedexpr(node)
- return supported_e[ node.tag ]
-end
-
-local idto_block = {} -- cache from metalua id to internal model block
-local idto_identifier = {} -- cache from metalua id to internal model indentifier
-local expreto_expression = {} -- cache from metalua expression to internal model expression
-
--- ----------------------------------------------------------
--- create an expression from a metalua node
--- ----------------------------------------------------------
-local function createexpr(expr,_block)
- local _expr = nil
-
- match expr with
- | `Id { name } ->
- -- we store the block which hold this node
- -- to be able to define
- idto_block[expr]= _block
-
- -- if expr has not line info, it means expr has no representation in the code
- -- so we don't need it.
- if not expr.lineinfo then return nil end
-
- -- create identifier
- local _identifier = internalmodel._identifier()
- idto_identifier[expr]= _identifier
- _expr = _identifier
- | `Index { innerexpr, rightpart } ->
- if not expr.lineinfo then return nil end
- -- create index
- local _expression = createexpr(innerexpr,_block)
- if _expression then
- if rightpart and rightpart.tag=='String' then
- _expr = internalmodel._index(_expression,rightpart[1])
- else
- _expr = internalmodel._index(_expression,nil)
- end
- end
- | `Call{innerexpr, ...} ->
- if not expr.lineinfo then return nil end
- -- create call
- local _expression = createexpr(innerexpr,_block)
- if _expression then _expr = internalmodel._call(_expression) end
- | `Invoke{innerexpr,`String{functionname},...} ->
- if not expr.lineinfo then return nil end
- -- create invoke
- local _expression = createexpr(innerexpr,_block)
- if _expression then _expr = internalmodel._invoke(functionname,_expression) end
- | _ ->
- end
-
- if _expr then
- _expr.sourcerange.min = expr.lineinfo.first.offset
- _expr.sourcerange.max = expr.lineinfo.last.offset
-
- expreto_expression[expr] = _expr
- end
-
- return _expr
-end
-
--- ----------------------------------------------------------
--- create block and expression node
--- ----------------------------------------------------------
-local function createtreestructure(ast)
- -- create internal content
- local _internalcontent = internalmodel._internalcontent()
-
- -- create root block
- local _block = internalmodel._block()
- local _blocks = { _block }
- _block.sourcerange.min = ast.lineinfo.first.facing.offset
- -- TODO remove the math.max when we support partial AST
- _block.sourcerange.max = math.max(ast.lineinfo.last.facing.offset, 10000)
-
- _internalcontent.content = _block
-
- -- visitor function (down)
- local function down (node,parent)
- if supportedblock(node,parent) then
- -- create the block
- local _block = createblock(node,parent)
- -- add it to parent block
- table.insert(_blocks[#_blocks].content, _block)
- -- enqueue the last block to know the "current" block
- table.insert(_blocks,_block)
- elseif supportedexpr(node) then
- -- we handle expression only if it was not already do
- if not expreto_expression[node] then
- -- create expr
- local _expression = createexpr(node,_blocks[#_blocks])
- -- add it to parent block
- if _expression then
- table.insert(_blocks[#_blocks].content, _expression)
- end
- end
- end
- end
-
- -- visitor function (up)
- local function up (node, parent)
- if supportedblock(node,parent) then
- -- dequeue the last block to know the "current" block
- table.remove(_blocks,#_blocks)
- end
- end
-
- -- visit ast and build internal model
- Q(ast):foreach(down,up)
-
- return _internalcontent
-end
-
-local getitem
-
--- ----------------------------------------------------------
--- create the type from the node and position
--- ----------------------------------------------------------
-local function createtype(node,position,comment2apiobj,file)
- -- create module type ref
- match node with
- | `Call{ `Id "require", `String {modulename}} ->
- return apimodel._moduletyperef(modulename,position)
- | `Function {params, body} ->
- -- create the functiontypedef from code
- local _functiontypedef = apimodel._functiontypedef()
- for _, p in ipairs(params) do
- -- create parameters
- local paramname
- if p.tag=="Dots" then
- paramname = "..."
- else
- paramname = p[1]
- end
- local _param = apimodel._parameter(paramname)
- table.insert(_functiontypedef.params,_param)
- end
- _functiontypedef.name = "___" -- no name for inline type
-
- return apimodel._inlinetyperef(_functiontypedef)
- | `String {value} ->
- local typeref = apimodel._primitivetyperef("string")
- return typeref
- | `Number {value} ->
- local typeref = apimodel._primitivetyperef("number")
- return typeref
- | `True | `False ->
- local typeref = apimodel._primitivetyperef("boolean")
- return typeref
- | `Table {...} ->
- -- create recordtypedef from code
- local _recordtypedef = apimodel._recordtypedef("table")
- -- for each element of the table
- for i=1,select("#", ...) do
- local pair = select(i, ...)
- -- if this is a pair we create a new item in the type
- if pair.tag == "Pair" then
- -- create an item
- local _item = getitem(pair,nil, comment2apiobj,file)
- if _item then
- _recordtypedef:addfield(_item)
- end
- end
- end
- return apimodel._inlinetyperef(_recordtypedef)
- | _ ->
- end
- -- if node is an expression supported
- local supportedexpr = expreto_expression[node]
- if supportedexpr then
- -- create expression type ref
- return apimodel._exprtyperef(supportedexpr,position)
- end
-
-end
-
-local function completeapidoctype(apidoctype,itemname,init,file,comment2apiobj)
- if not apidoctype.name then
- apidoctype.name = itemname
- file:mergetype(apidoctype)
- end
-
- -- create type from code
- local typeref = createtype(init,1,comment2apiobj,file)
- if typeref and typeref.tag == "inlinetyperef"
- and typeref.def.tag == "recordtypedef" then
-
- -- set the name
- typeref.def.name = apidoctype.name
-
- -- merge the type with priority to documentation except for source range
- file:mergetype(typeref.def,false,true)
- end
-end
-
-local function completeapidocitem (apidocitem, itemname, init, file, binder, comment2apiobj)
- -- manage the case item has no name
- if not apidocitem.name then
- apidocitem.name = itemname
-
- -- if item has no name this means it could not be attach to a parent
- if apidocitem.scope then
- apimodelbuilder.additemtoparent(file,apidocitem,apidocitem.scope,apidocitem.sourcerange.min,apidocitem.sourcerange.max)
- apidocitem.scope = nil
- end
- end
-
- -- for function try to merge definition
- local apitype = apidocitem:resolvetype(file)
- if apitype and apitype.tag == "functiontypedef" then
- local codetype = createtype(init,1,comment2apiobj,file)
- if codetype and codetype.tag =="inlinetyperef" then
- codetype.def.name = apitype.name
- file:mergetype(codetype.def)
- end
- end
-
- -- manage the case item has no type
- if not apidocitem.type then
- -- extract typing from comment
- local type, desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
-
- if type then
- apidocitem.type = type
- else
- -- if not found extracttype from code
- apidocitem.type = createtype(init,1,comment2apiobj,file)
- end
-
- local apitype = apidocitem:resolvetype(file)
- if apitype and apitype.tag == "functiontypedef" and apidocitem.metadata then
- apitype.metadata = apidocitem.metadata
- end
- end
-end
-
--- ----------------------------------------------------------
--- create or get the item finding in the binder with the given itemname
--- return also the ast node corresponding to this item
--- ----------------------------------------------------------
-getitem = function (binder, itemname, comment2apiobj, file)
-
- -- local function to create item
- local function createitem(itemname, astnode, itemtype, description)
- local _item = apimodel._item(itemname)
- if description then _item.description = description end
- _item.type = itemtype
- if astnode and astnode.lineinfo then
- _item.sourcerange.min = astnode.lineinfo.first.offset
- _item.sourcerange.max = astnode.lineinfo.last.offset
- end
- return _item, astnode
- end
-
- -- try to match binder with known patter of item declaration
- match binder with
- | `Pair {string, init}
- | `Set { {`Index { right , string}}, {init,...}} if string and string.tag =="String" ->
- -- Pair and set is for searching field from type ..
- -- if the itemname is given this mean we search for a local or a global not a field type.
- if not itemname then
- local itemname = string[1]
-
- -- check for luadoc typing
- local commentbefore = getlinkedcommentbefore(binder)
- local apiobj = comment2apiobj[commentbefore] -- find apiobj linked to this comment
- if apiobj then
- if apiobj.tag=="item" then
- if not apiobj.name or apiobj.name == itemname then
- -- use code to complete api information if it's necessary
- completeapidocitem(apiobj, itemname, init,file,binder,comment2apiobj)
- -- for item use code source range rather than doc source range
- if string and string.lineinfo then
- apiobj.sourcerange.min = string.lineinfo.first.offset
- apiobj.sourcerange.max = string.lineinfo.last.offset
- end
- return apiobj, string
- end
- elseif apiobj.tag=="recordtypedef" then
- -- use code to complete api information if it's necessary
- completeapidoctype(apiobj, itemname, init,file,comment2apiobj)
- return createitem(itemname, string, apimodel._internaltyperef(apiobj.name), nil)
- end
-
- -- if the apiobj could not be associated to the current obj,
- -- we do not use the documentation neither
- commentbefore = nil
- end
-
- -- else we use code to extract the type and description
- -- check for "local" typing
- local type, desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
- local desc = desc or (commentbefore and commentbefore[1])
- if type then
- return createitem(itemname, string, type, desc )
- else
- -- if no "local typing" extract type from code
- return createitem(itemname, string, createtype(init,1,comment2apiobj,file), desc)
- end
- end
- | `Set {ids, inits}
- | `Local {ids, inits} ->
- -- if this is a single local var declaration
- -- we check if there are a comment block linked and try to extract the type
- if #ids == 1 then
- local currentid, currentinit = ids[1],inits[1]
- -- ignore non Ids node
- if currentid.tag ~= 'Id' or currentid[1] ~= itemname then return nil end
-
- -- check for luadoc typing
- local commentbefore = getlinkedcommentbefore(binder)
- local apiobj = comment2apiobj[commentbefore] -- find apiobj linked to this comment
- if apiobj then
- if apiobj.tag=="item" then
- -- use code to complete api information if it's necessary
- if not apiobj.name or apiobj.name == itemname then
- completeapidocitem(apiobj, itemname, currentinit,file,binder,comment2apiobj)
- -- if this is a global var or if is has no parent
- -- we do not create a new item
- if not apiobj.parent or apiobj.parent == file then
- -- for item use code source range rather than doc source range
- if currentid and currentid.lineinfo then
- apiobj.sourcerange.min = currentid.lineinfo.first.offset
- apiobj.sourcerange.max = currentid.lineinfo.last.offset
- end
- return apiobj, currentid
- else
- return createitem(itemname, currentid, apiobj.type, nil)
- end
- end
- elseif apiobj.tag=="recordtypedef" then
- -- use code to complete api information if it's necessary
- completeapidoctype(apiobj, itemname, currentinit,file,comment2apiobj)
- return createitem(itemname, currentid, apimodel._internaltyperef(apiobj.name), nil)
- end
-
- -- if the apiobj could not be associated to the current obj,
- -- we do not use the documentation neither
- commentbefore = nil
- end
-
- -- else we use code to extract the type and description
- -- check for "local" typing
- local type,desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
- desc = desc or (commentbefore and commentbefore[1])
- if type then
- return createitem(itemname, currentid, type, desc)
- else
- -- if no "local typing" extract type from code
- return createitem(itemname, currentid, createtype(currentinit,1,comment2apiobj,file), desc)
- end
- end
- -- else we use code to extract the type
- local init,returnposition = nil,1
- for i,id in ipairs(ids) do
- -- calculate the current return position
- if init and (init.tag == "Call" or init.tag == "Invoke") then
- -- if previous init was a call or an invoke
- -- we increment the returnposition
- returnposition= returnposition+1
- else
- -- if init is not a function call
- -- we change the init used to determine the type
- init = inits[i]
- end
-
- -- get the name of the current id
- local idname = id[1]
-
- -- if this is the good id
- if itemname == idname then
- -- create type from init node and return position
- return createitem (itemname, id, createtype(init,returnposition,comment2apiobj,file),nil)
- end
- end
- | `Function {params, body} ->
- for i,id in ipairs(params) do
- -- get the name of the current id
- local idname = id[1]
- -- if this is the good id
- if itemname == idname then
- -- extract param's type from luadocumentation
- local obj = comment2apiobj[getlinkedcommentbefore(binder)]
- if obj and obj.tag=="item" then
- local typedef = obj:resolvetype(file)
- if typedef and typedef.tag =="functiontypedef" then
- for j, param in ipairs(typedef.params) do
- if i==j then
- if i ==1 and itemname == "self" and param.type == nil
- and obj.parent and obj.parent.tag == "recordtypedef" and obj.parent.name then
- param.type = apimodel._internaltyperef(obj.parent.name)
- end
- -- TODO perhaps we must clone the typeref
- return createitem(itemname,id, param.type,param.description)
- end
- end
- end
- end
- return createitem(itemname,id)
- end
- end
- | `Forin {ids, expr, body} ->
- for i,id in ipairs(ids) do
- -- get the name of the current id
- local idname = id[1]
- -- if this is the good id
- if itemname == idname then
- -- return data : we can not guess the type for now
- return createitem(itemname,id)
- end
- end
- | `Fornum {id, ...} ->
- -- get the name of the current id
- local idname = id[1]
- -- if this is the good id
- if itemname == idname then
- -- return data : we can not guess the type for now
- return createitem(itemname,id)
- end
- | `Localrec {{id}, {func}} ->
- -- get the name of the current id
- local idname = id[1]
- -- if this is the good id
- if itemname == idname then
- -- check for luadoc typing
- local commentbefore = getlinkedcommentbefore(binder)
- local apiobj = comment2apiobj[commentbefore] -- find apiobj linked to this comment
- if apiobj then
- if apiobj.tag=="item" then
- if not apiobj.name or apiobj.name == itemname then
- -- use code to complete api information if it's necessary
- completeapidocitem(apiobj, itemname, func,file,binder,comment2apiobj)
- return createitem(itemname,id,apiobj.type,nil)
- end
- end
-
- -- if the apiobj could not be associated to the current obj,
- -- we do not use the documentation neither
- commentbefore = nil
- end
-
- -- else we use code to extract the type and description
- -- check for "local" typing
- local type,desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
- desc = desc or (commentbefore and commentbefore[1])
- if type then
- return createitem(itemname, id, type, desc)
- else
- -- if no "local typing" extract type from code
- return createitem(itemname, id, createtype(func,1,comment2apiobj,file), desc)
- end
- end
- | _ ->
- end
-end
-
--- ----------------------------------------------------------
--- Search from Id node to Set node to find field of type.
---
--- Lua code : table.field1.field2 = 12
--- looks like that in metalua :
--- `Set{
--- `Index { `Index { `Id "table", `String "field1" },
--- `String "field2"},
--- `Number "12"}
--- ----------------------------------------------------------
-local function searchtypefield(node,_currentitem,comment2apiobj,file)
-
- -- we are just interested :
- -- by item which is field of recordtypedef
- -- by ast node which are Index
- if _currentitem then
- local type = _currentitem:resolvetype(file)
- if type and type.tag == "recordtypedef" then
- if node and node.tag == "Index" then
- local rightpart = node[2]
- local _newcurrentitem = type.fields[rightpart[1]]
-
- if _newcurrentitem then
- -- if this index represent a known field of the type we continue to search
- searchtypefield (node.parent,_newcurrentitem,comment2apiobj,file)
- else
- -- if not, this is perhaps a new field, but
- -- to be a new field this index must be include in a Set
- if node.parent and node.parent.tag =="Set" then
- -- in this case we create the new item ans add it to the type
- local set = node.parent
- local item, string = getitem(set,nil, comment2apiobj,file)
- -- add this item to the type, only if it has no parent and if this type does not contain already this field
- if item and not item.parent and string and not type.fields[string[1]] then
- type:addfield(item)
- end
- end
- end
- end
- end
- end
-end
-
--- ----------------------------------------------------------
--- create local vars, global vars and linked it with theirs occurences
--- ----------------------------------------------------------
-local function createvardefinitions(_internalcontent,ast,file,comment2apiobj)
- -- use bindings to get locals and globals definition
- local locals, globals = bindings( ast )
-
- -- create locals var
- for binder, namesAndOccurrences in pairs(locals) do
- for name, occurrences in pairs(namesAndOccurrences) do
- -- get item, id
- local _item, id = getitem(binder, name,comment2apiobj,file)
- if id then
- -- add definition as occurence
- -- we consider the identifier in the binder as an occurence
- local _identifierdef = idto_identifier[id]
- if _identifierdef then
- table.insert(_item.occurrences, _identifierdef)
- _identifierdef.definition = _item
- end
-
- -- add occurences
- for _,occurrence in ipairs(occurrences) do
- searchtypefield(occurrence.parent, _item,comment2apiobj,file)
- local _identifier = idto_identifier[occurrence]
- if _identifier then
- table.insert(_item.occurrences, _identifier)
- _identifier.definition = _item
- end
- end
-
- -- add item to block
- local _block = idto_block[id]
- table.insert(_block.localvars,{item=_item,scope = {min=0,max=0}})
- end
- end
- end
-
- -- create globals var
- for name, occurrences in pairs( globals ) do
-
- -- get or create definition
- local _item = file.globalvars[name]
- local binder = occurrences[1].parent
- if not _item then
- -- global declaration is only if the first occurence in left part of a 'Set'
- if binder and binder.tag == "Set" then
- _item = getitem(binder, name,comment2apiobj,file)
- end
-
- -- if we find and item this is a global var declaration
- if _item then
- file:addglobalvar(_item)
- else
- -- else it is an unknown global var
- _item = apimodel._item(name)
- local _firstoccurrence = idto_identifier[occurrences[1]]
- if _firstoccurrence then
- _item.sourcerange.min = _firstoccurrence.sourcerange.min
- _item.sourcerange.max = _firstoccurrence.sourcerange.max
- end
- table.insert(_internalcontent.unknownglobalvars,_item)
- end
- else
- -- if the global var definition already exists, we just try to it
- if binder then
- match binder with
- | `Set {ids, inits} ->
- -- manage case only if there are 1 element in the Set
- if #ids == 1 then
- local currentid, currentinit = ids[1],inits[1]
- -- ignore non Ids node and bad name
- if currentid.tag == 'Id' and currentid[1] == name then
- completeapidocitem(_item, name, currentinit,file,binder,comment2apiobj)
-
- if currentid and currentid.lineinfo then
- _item.sourcerange.min = currentid.lineinfo.first.offset
- _item.sourcerange.max = currentid.lineinfo.last.offset
- end
- end
- end
- | _ ->
- end
- end
- end
-
- -- add occurences
- for _,occurence in ipairs(occurrences) do
- local _identifier = idto_identifier[occurence]
- searchtypefield(occurence.parent, _item,comment2apiobj,file)
- if _identifier then
- table.insert(_item.occurrences, _identifier)
- _identifier.definition = _item
- end
- end
- end
-end
-
--- ----------------------------------------------------------
--- add parent to all ast node
--- ----------------------------------------------------------
-local function addparents(ast)
- -- visitor function (down)
- local function down (node,parent)
- node.parent = parent
- end
-
- -- visit ast and build internal model
- Q(ast):foreach(down,up)
-end
-
--- ----------------------------------------------------------
--- try to detect a module declaration from code
--- ----------------------------------------------------------
-local function searchmodule(ast,file,comment2apiobj,modulename)
- -- if the last statement is a return
- if ast then
- local laststatement = ast[#ast]
- if laststatement and laststatement.tag == "Return" then
- -- and if the first expression returned is an identifier.
- local firstexpr = laststatement[1]
- if firstexpr and firstexpr.tag == "Id" then
- -- get identifier in internal model
- local _identifier = idto_identifier [firstexpr]
- -- the definition should be an inline type
- if _identifier
- and _identifier.definition
- and _identifier.definition.type
- and _identifier.definition.type.tag == "inlinetyperef"
- and _identifier.definition.type.def.tag == "recordtypedef" then
-
- --set modulename if needed
- if not file.name then file.name = modulename end
-
- -- create or merge type
- local _type = _identifier.definition.type.def
- _type.name = modulename
-
- -- if file (module) has no documentation add item documentation to it
- -- else add it to the type.
- if not file.description or file.description == "" then
- file.description = _identifier.definition.description
- else
- _type.description = _identifier.definition.description
- end
- _identifier.definition.description = ""
- if not file.shortdescription or file.shortdescription == "" then
- file.shortdescription = _identifier.definition.shortdescription
- else
- _type.shortdescription = _identifier.definition.shortdescription
- end
- _identifier.definition.shortdescription = ""
-
- -- WORKAROUND FOR BUG 421622: [outline]module selection in outline does not select it in texteditor
- --_type.sourcerange.min = _identifier.definition.sourcerange.min
- --_type.sourcerange.max = _identifier.definition.sourcerange.max
-
- -- merge the type with priority to documentation except for source range
- file:mergetype(_type,false,true)
-
- -- create return if needed
- if not file.returns[1] then
- file.returns[1] = apimodel._return()
- file.returns[1].types = { apimodel._internaltyperef(modulename) }
- end
-
- -- change the type of the identifier
- _identifier.definition.type = apimodel._internaltyperef(modulename)
- end
- end
- end
- end
-end
-
--- ----------------------------------------------------------
--- create the internalcontent from an ast metalua
--- ----------------------------------------------------------
-function M.createinternalcontent (ast,file,comment2apiobj,modulename)
- -- init cache
- idto_block = {}
- idto_identifier = {}
- expreto_expression = {}
- comment2apiobj = comment2apiobj or {}
- file = file or apimodel._file()
-
- -- execute code safely to be sure to clean cache correctly
- local internalcontent
- local ok, errmsg = pcall(function ()
- -- add parent to all node
- addparents(ast)
-
- -- create block and expression node
- internalcontent = createtreestructure(ast)
-
- -- create Local vars, global vars and linked occurences (Items)
- createvardefinitions(internalcontent,ast,file,comment2apiobj)
-
- -- try to dectect module information from code
- local moduletyperef = file:moduletyperef()
- if moduletyperef and moduletyperef.tag == "internaltyperef" then
- modulename = moduletyperef.typename or modulename
- end
- if modulename then
- searchmodule(ast,file,comment2apiobj,modulename)
- end
- end)
-
- -- clean cache
- idto_block = {}
- idto_identifier = {}
- expreto_expression = {}
-
- -- if not ok raise an error
- if not ok then error (errmsg) end
-
- return internalcontent
-end
-
-return M
+--------------------------------------------------------------------------------
+-- Copyright (c) 2011-2012 Sierra Wireless.
+-- All rights reserved. This program and the accompanying materials
+-- are made available under the terms of the Eclipse Public License v1.0
+-- which accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- Contributors:
+-- Simon BERNARD <sbernard@sierrawireless.com>
+-- - initial API and implementation and initial documentation
+--------------------------------------------------------------------------------
+-{ extension ('match', ...) }
+
+local Q = require 'metalua.treequery'
+
+local internalmodel = require 'models.internalmodel'
+local apimodel = require 'models.apimodel'
+local apimodelbuilder = require 'models.apimodelbuilder'
+
+local M = {}
+
+-- Analyzes an AST and returns two tables
+-- * `locals`, which associates `Id{ } nodes which create a local variable
+-- to a list of the `Id{ } occurrence nodes of that variable;
+-- * `globals` which associates variable names to occurrences of
+-- global variables having that name.
+function bindings(ast)
+ local locals, globals = { }, { }
+ local function f(id, ...)
+ local name = id[1]
+ if Q.is_binder(id, ...) then
+ local binder = ... -- parent is the binder
+ locals[binder] = locals[binder] or { }
+ locals[binder][name]={ }
+ else
+ local _, binder = Q.get_binder(id, ...)
+ if binder then -- this is a local
+ table.insert(locals[binder][name], id)
+ else
+ local g = globals[name]
+ if g then table.insert(g, id) else globals[name]={id} end
+ end
+ end
+ end
+ Q(ast) :filter('Id') :foreach(f)
+ return locals, globals
+end
+
+-- --------------------------------------
+
+-- ----------------------------------------------------------
+-- return the comment linked before to this node
+-- ----------------------------------------------------------
+local function getlinkedcommentbefore(node)
+ local function _getlinkedcomment(node,line)
+ if node and node.lineinfo and node.lineinfo.first.line == line then
+ -- get the last comment before (the nearest of code)
+ local comments = node.lineinfo.first.comments
+ local comment = comments and comments[#comments]
+ if comment and comment.lineinfo.last.line == line-1 then
+ -- ignore the comment if there is code before it on the same line
+ if node.lineinfo.first.facing and (node.lineinfo.first.facing.line ~= comment.lineinfo.first.line) then
+ return comment
+ end
+ else
+ return _getlinkedcomment(node.parent,line)
+ end
+ end
+ return nil
+ end
+
+ if node.lineinfo and node.lineinfo.first.line then
+ return _getlinkedcomment(node,node.lineinfo.first.line)
+ else
+ return nil
+ end
+end
+
+-- ----------------------------------------------------------
+-- return the comment linked after to this node
+-- ----------------------------------------------------------
+local function getlinkedcommentafter(node)
+ local function _getlinkedcomment(node,line)
+ if node and node.lineinfo and node.lineinfo.last.line == line then
+ -- get the first comment after (the nearest of code)
+ local comments = node.lineinfo.last.comments
+ local comment = comments and comments[1]
+ if comment and comment.lineinfo.first.line == line then
+ return comment
+ else
+ return _getlinkedcomment(node.parent,line)
+ end
+ end
+ return nil
+ end
+
+ if node.lineinfo and node.lineinfo.last.line then
+ return _getlinkedcomment(node,node.lineinfo.last.line)
+ else
+ return nil
+ end
+end
+
+-- ----------------------------------------------------------
+-- return true if this node is a block for the internal representation
+-- ----------------------------------------------------------
+local supported_b = {
+ Function = true,
+ Do = true,
+ While = true,
+ Fornum = true,
+ Forin = true,
+ Repeat = true,
+}
+local function supportedblock(node, parent)
+ return supported_b[ node.tag ] or
+ (parent and parent.tag == "If" and node.tag == nil)
+end
+
+-- ----------------------------------------------------------
+-- create a block from the metalua node
+-- ----------------------------------------------------------
+local function createblock(block, parent)
+ local _block = internalmodel._block()
+ match block with
+ | `Function{param, body}
+ | `Do{...}
+ | `Fornum {identifier, min, max, body}
+ | `Forin {identifiers, exprs, body}
+ | `Repeat {body, expr} ->
+ _block.sourcerange.min = block.lineinfo.first.offset
+ _block.sourcerange.max = block.lineinfo.last.offset
+ | `While {expr, body} ->
+ _block.sourcerange.min = body.lineinfo.first.facing.offset
+ _block.sourcerange.max = body.lineinfo.last.facing.offset
+ | _ ->
+ if parent and parent.tag == "If" and block.tag == nil then
+ _block.sourcerange.min = block.lineinfo.first.facing.offset
+ _block.sourcerange.max = block.lineinfo.last.facing.offset
+ end
+ end
+ return _block
+end
+
+-- ----------------------------------------------------------
+-- return true if this node is a expression in the internal representation
+-- ----------------------------------------------------------
+local supported_e = {
+ Index = true,
+ Id = true,
+ Call = true,
+ Invoke = true
+}
+local function supportedexpr(node)
+ return supported_e[ node.tag ]
+end
+
+local idto_block = {} -- cache from metalua id to internal model block
+local idto_identifier = {} -- cache from metalua id to internal model identifier
+local expreto_expression = {} -- cache from metalua expression to internal model expression
+
+-- ----------------------------------------------------------
+-- create an expression from a metalua node
+-- ----------------------------------------------------------
+local function createexpr(expr,_block)
+ local _expr = nil
+
+ match expr with
+ | `Id { name } ->
+ -- we store the block which hold this node
+ -- to be able to define
+ idto_block[expr]= _block
+
+ -- if expr has no line info, it means expr has no representation in the code
+ -- so we don't need it.
+ if not expr.lineinfo then return nil end
+
+ -- create identifier
+ local _identifier = internalmodel._identifier()
+ idto_identifier[expr]= _identifier
+ _expr = _identifier
+ | `Index { innerexpr, rightpart } ->
+ if not expr.lineinfo then return nil end
+ -- create index
+ local _expression = createexpr(innerexpr,_block)
+ if _expression then
+ if rightpart and rightpart.tag=='String' then
+ _expr = internalmodel._index(_expression,rightpart[1])
+ else
+ _expr = internalmodel._index(_expression,nil)
+ end
+ end
+ | `Call{innerexpr, ...} ->
+ if not expr.lineinfo then return nil end
+ -- create call
+ local _expression = createexpr(innerexpr,_block)
+ if _expression then _expr = internalmodel._call(_expression) end
+ | `Invoke{innerexpr,`String{functionname},...} ->
+ if not expr.lineinfo then return nil end
+ -- create invoke
+ local _expression = createexpr(innerexpr,_block)
+ if _expression then _expr = internalmodel._invoke(functionname,_expression) end
+ | _ ->
+ end
+
+ if _expr then
+ _expr.sourcerange.min = expr.lineinfo.first.offset
+ _expr.sourcerange.max = expr.lineinfo.last.offset
+
+ expreto_expression[expr] = _expr
+ end
+
+ return _expr
+end
+
+-- ----------------------------------------------------------
+-- create block and expression node
+-- ----------------------------------------------------------
+local function createtreestructure(ast)
+ -- create internal content
+ local _internalcontent = internalmodel._internalcontent()
+
+ -- create root block
+ local _block = internalmodel._block()
+ local _blocks = { _block }
+ _block.sourcerange.min = ast.lineinfo.first.facing.offset
+ -- TODO remove the math.max when we support partial AST
+ _block.sourcerange.max = math.max(ast.lineinfo.last.facing.offset, 10000)
+
+ _internalcontent.content = _block
+
+ -- visitor function (down)
+ local function down (node,parent)
+ if supportedblock(node,parent) then
+ -- create the block
+ local _block = createblock(node,parent)
+ -- add it to parent block
+ table.insert(_blocks[#_blocks].content, _block)
+ -- enqueue the last block to know the "current" block
+ table.insert(_blocks,_block)
+ elseif supportedexpr(node) then
+ -- we handle the expression only if it was not already done
+ if not expreto_expression[node] then
+ -- create expr
+ local _expression = createexpr(node,_blocks[#_blocks])
+ -- add it to parent block
+ if _expression then
+ table.insert(_blocks[#_blocks].content, _expression)
+ end
+ end
+ end
+ end
+
+ -- visitor function (up)
+ local function up (node, parent)
+ if supportedblock(node,parent) then
+ -- dequeue the last block to know the "current" block
+ table.remove(_blocks,#_blocks)
+ end
+ end
+
+ -- visit ast and build internal model
+ Q(ast):foreach(down,up)
+
+ return _internalcontent
+end
+
+local getitem
+
+-- ----------------------------------------------------------
+-- create the type from the node and position
+-- ----------------------------------------------------------
+local function createtype(node,position,comment2apiobj,file)
+ -- create module type ref
+ match node with
+ | `Call{ `Id "require", `String {modulename}} ->
+ return apimodel._moduletyperef(modulename,position)
+ | `Function {params, body} ->
+ -- create the functiontypedef from code
+ local _functiontypedef = apimodel._functiontypedef()
+ for _, p in ipairs(params) do
+ -- create parameters
+ local paramname
+ if p.tag=="Dots" then
+ paramname = "..."
+ else
+ paramname = p[1]
+ end
+ local _param = apimodel._parameter(paramname)
+ table.insert(_functiontypedef.params,_param)
+ end
+ _functiontypedef.name = "___" -- no name for inline type
+
+ return apimodel._inlinetyperef(_functiontypedef)
+ | `String {value} ->
+ local typeref = apimodel._primitivetyperef("string")
+ return typeref
+ | `Number {value} ->
+ local typeref = apimodel._primitivetyperef("number")
+ return typeref
+ | `True | `False ->
+ local typeref = apimodel._primitivetyperef("boolean")
+ return typeref
+ | `Table {...} ->
+ -- create recordtypedef from code
+ local _recordtypedef = apimodel._recordtypedef("table")
+ -- for each element of the table
+ for i=1,select("#", ...) do
+ local pair = select(i, ...)
+ -- if this is a pair we create a new item in the type
+ if pair.tag == "Pair" then
+ -- create an item
+ local _item = getitem(pair,nil, comment2apiobj,file)
+ if _item then
+ _recordtypedef:addfield(_item)
+ end
+ end
+ end
+ return apimodel._inlinetyperef(_recordtypedef)
+ | _ ->
+ end
+ -- if node is an expression supported
+ local supportedexpr = expreto_expression[node]
+ if supportedexpr then
+ -- create expression type ref
+ return apimodel._exprtyperef(supportedexpr,position)
+ end
+
+end
+
+local function completeapidoctype(apidoctype,itemname,init,file,comment2apiobj)
+ if not apidoctype.name then
+ apidoctype.name = itemname
+ file:mergetype(apidoctype)
+ end
+
+ -- create type from code
+ local typeref = createtype(init,1,comment2apiobj,file)
+ if typeref and typeref.tag == "inlinetyperef"
+ and typeref.def.tag == "recordtypedef" then
+
+ -- set the name
+ typeref.def.name = apidoctype.name
+
+ -- merge the type with priority to documentation except for source range
+ file:mergetype(typeref.def,false,true)
+ end
+end
+
+local function completeapidocitem (apidocitem, itemname, init, file, binder, comment2apiobj)
+ -- manage the case item has no name
+ if not apidocitem.name then
+ apidocitem.name = itemname
+
+ -- if item has no name this means it could not be attached to a parent
+ if apidocitem.scope then
+ apimodelbuilder.additemtoparent(file,apidocitem,apidocitem.scope,apidocitem.sourcerange.min,apidocitem.sourcerange.max)
+ apidocitem.scope = nil
+ end
+ end
+
+ -- for function try to merge definition
+ local apitype = apidocitem:resolvetype(file)
+ if apitype and apitype.tag == "functiontypedef" then
+ local codetype = createtype(init,1,comment2apiobj,file)
+ if codetype and codetype.tag =="inlinetyperef" then
+ codetype.def.name = apitype.name
+ file:mergetype(codetype.def)
+ end
+ end
+
+ -- manage the case item has no type
+ if not apidocitem.type then
+ -- extract typing from comment
+ local type, desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
+
+ if type then
+ apidocitem.type = type
+ else
+ -- if not found extracttype from code
+ apidocitem.type = createtype(init,1,comment2apiobj,file)
+ end
+
+ local apitype = apidocitem:resolvetype(file)
+ if apitype and apitype.tag == "functiontypedef" and apidocitem.metadata then
+ apitype.metadata = apidocitem.metadata
+ end
+ end
+end
+
+-- ----------------------------------------------------------
+-- create or get the item found in the binder with the given itemname
+-- return also the ast node corresponding to this item
+-- ----------------------------------------------------------
+getitem = function (binder, itemname, comment2apiobj, file)
+
+ -- local function to create item
+ local function createitem(itemname, astnode, itemtype, description)
+ local _item = apimodel._item(itemname)
+ if description then _item.description = description end
+ _item.type = itemtype
+ if astnode and astnode.lineinfo then
+ _item.sourcerange.min = astnode.lineinfo.first.offset
+ _item.sourcerange.max = astnode.lineinfo.last.offset
+ end
+ return _item, astnode
+ end
+
+ -- try to match binder with known patterns of item declaration
+ match binder with
+ | `Pair {string, init}
+ | `Set { {`Index { right , string}}, {init,...}} if string and string.tag =="String" ->
+ -- Pair and set is for searching field from type ..
+ -- if the itemname is given this means we search for a local or a global, not a field type.
+ if not itemname then
+ local itemname = string[1]
+
+ -- check for luadoc typing
+ local commentbefore = getlinkedcommentbefore(binder)
+ local apiobj = comment2apiobj[commentbefore] -- find apiobj linked to this comment
+ if apiobj then
+ if apiobj.tag=="item" then
+ if not apiobj.name or apiobj.name == itemname then
+ -- use code to complete api information if it's necessary
+ completeapidocitem(apiobj, itemname, init,file,binder,comment2apiobj)
+ -- for item use code source range rather than doc source range
+ if string and string.lineinfo then
+ apiobj.sourcerange.min = string.lineinfo.first.offset
+ apiobj.sourcerange.max = string.lineinfo.last.offset
+ end
+ return apiobj, string
+ end
+ elseif apiobj.tag=="recordtypedef" then
+ -- use code to complete api information if it's necessary
+ completeapidoctype(apiobj, itemname, init,file,comment2apiobj)
+ return createitem(itemname, string, apimodel._internaltyperef(apiobj.name), nil)
+ end
+
+ -- if the apiobj could not be associated to the current obj,
+ -- we do not use the documentation neither
+ commentbefore = nil
+ end
+
+ -- else we use code to extract the type and description
+ -- check for "local" typing
+ local type, desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
+ local desc = desc or (commentbefore and commentbefore[1])
+ if type then
+ return createitem(itemname, string, type, desc )
+ else
+ -- if no "local typing" extract type from code
+ return createitem(itemname, string, createtype(init,1,comment2apiobj,file), desc)
+ end
+ end
+ | `Set {ids, inits}
+ | `Local {ids, inits} ->
+ -- if this is a single local var declaration
+ -- we check if there are a comment block linked and try to extract the type
+ if #ids == 1 then
+ local currentid, currentinit = ids[1],inits[1]
+ -- ignore non Ids node
+ if currentid.tag ~= 'Id' or currentid[1] ~= itemname then return nil end
+
+ -- check for luadoc typing
+ local commentbefore = getlinkedcommentbefore(binder)
+ local apiobj = comment2apiobj[commentbefore] -- find apiobj linked to this comment
+ if apiobj then
+ if apiobj.tag=="item" then
+ -- use code to complete api information if it's necessary
+ if not apiobj.name or apiobj.name == itemname then
+ completeapidocitem(apiobj, itemname, currentinit,file,binder,comment2apiobj)
+ -- if this is a global var or if it has no parent
+ -- we do not create a new item
+ if not apiobj.parent or apiobj.parent == file then
+ -- for item use code source range rather than doc source range
+ if currentid and currentid.lineinfo then
+ apiobj.sourcerange.min = currentid.lineinfo.first.offset
+ apiobj.sourcerange.max = currentid.lineinfo.last.offset
+ end
+ return apiobj, currentid
+ else
+ return createitem(itemname, currentid, apiobj.type, nil)
+ end
+ end
+ elseif apiobj.tag=="recordtypedef" then
+ -- use code to complete api information if it's necessary
+ completeapidoctype(apiobj, itemname, currentinit,file,comment2apiobj)
+ return createitem(itemname, currentid, apimodel._internaltyperef(apiobj.name), nil)
+ end
+
+ -- if the apiobj could not be associated to the current obj,
+ -- we do not use the documentation neither
+ commentbefore = nil
+ end
+
+ -- else we use code to extract the type and description
+ -- check for "local" typing
+ local type,desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
+ desc = desc or (commentbefore and commentbefore[1])
+ if type then
+ return createitem(itemname, currentid, type, desc)
+ else
+ -- if no "local typing" extract type from code
+ return createitem(itemname, currentid, createtype(currentinit,1,comment2apiobj,file), desc)
+ end
+ end
+ -- else we use code to extract the type
+ local init,returnposition = nil,1
+ for i,id in ipairs(ids) do
+ -- calculate the current return position
+ if init and (init.tag == "Call" or init.tag == "Invoke") then
+ -- if previous init was a call or an invoke
+ -- we increment the returnposition
+ returnposition= returnposition+1
+ else
+ -- if init is not a function call
+ -- we change the init used to determine the type
+ init = inits[i]
+ end
+
+ -- get the name of the current id
+ local idname = id[1]
+
+ -- if this is the good id
+ if itemname == idname then
+ -- create type from init node and return position
+ return createitem (itemname, id, createtype(init,returnposition,comment2apiobj,file),nil)
+ end
+ end
+ | `Function {params, body} ->
+ for i,id in ipairs(params) do
+ -- get the name of the current id
+ local idname = id[1]
+ -- if this is the good id
+ if itemname == idname then
+ -- extract param's type from luadocumentation
+ local obj = comment2apiobj[getlinkedcommentbefore(binder)]
+ if obj and obj.tag=="item" then
+ local typedef = obj:resolvetype(file)
+ if typedef and typedef.tag =="functiontypedef" then
+ for j, param in ipairs(typedef.params) do
+ if i==j then
+ if i ==1 and itemname == "self" and param.type == nil
+ and obj.parent and obj.parent.tag == "recordtypedef" and obj.parent.name then
+ param.type = apimodel._internaltyperef(obj.parent.name)
+ end
+ -- TODO perhaps we must clone the typeref
+ return createitem(itemname,id, param.type,param.description)
+ end
+ end
+ end
+ end
+ return createitem(itemname,id)
+ end
+ end
+ | `Forin {ids, expr, body} ->
+ for i,id in ipairs(ids) do
+ -- get the name of the current id
+ local idname = id[1]
+ -- if this is the good id
+ if itemname == idname then
+ -- return data : we can not guess the type for now
+ return createitem(itemname,id)
+ end
+ end
+ | `Fornum {id, ...} ->
+ -- get the name of the current id
+ local idname = id[1]
+ -- if this is the good id
+ if itemname == idname then
+ -- return data : we can not guess the type for now
+ return createitem(itemname,id)
+ end
+ | `Localrec {{id}, {func}} ->
+ -- get the name of the current id
+ local idname = id[1]
+ -- if this is the good id
+ if itemname == idname then
+ -- check for luadoc typing
+ local commentbefore = getlinkedcommentbefore(binder)
+ local apiobj = comment2apiobj[commentbefore] -- find apiobj linked to this comment
+ if apiobj then
+ if apiobj.tag=="item" then
+ if not apiobj.name or apiobj.name == itemname then
+ -- use code to complete api information if it's necessary
+ completeapidocitem(apiobj, itemname, func,file,binder,comment2apiobj)
+ return createitem(itemname,id,apiobj.type,nil)
+ end
+ end
+
+ -- if the apiobj could not be associated to the current obj,
+ -- we do not use the documentation neither
+ commentbefore = nil
+ end
+
+ -- else we use code to extract the type and description
+ -- check for "local" typing
+ local type,desc = apimodelbuilder.extractlocaltype(getlinkedcommentafter(binder),file)
+ desc = desc or (commentbefore and commentbefore[1])
+ if type then
+ return createitem(itemname, id, type, desc)
+ else
+ -- if no "local typing" extract type from code
+ return createitem(itemname, id, createtype(func,1,comment2apiobj,file), desc)
+ end
+ end
+ | _ ->
+ end
+end
+
+-- ----------------------------------------------------------
+-- Search from Id node to Set node to find field of type.
+--
+-- Lua code : table.field1.field2 = 12
+-- looks like that in metalua :
+-- `Set{
+-- `Index { `Index { `Id "table", `String "field1" },
+-- `String "field2"},
+-- `Number "12"}
+-- ----------------------------------------------------------
+local function searchtypefield(node,_currentitem,comment2apiobj,file)
+
+ -- we are just interested :
+ -- by item which is field of recordtypedef
+ -- by ast node which are Index
+ if _currentitem then
+ local type = _currentitem:resolvetype(file)
+ if type and type.tag == "recordtypedef" then
+ if node and node.tag == "Index" then
+ local rightpart = node[2]
+ local _newcurrentitem = type.fields[rightpart[1]]
+
+ if _newcurrentitem then
+ -- if this index represent a known field of the type we continue to search
+ searchtypefield (node.parent,_newcurrentitem,comment2apiobj,file)
+ else
+ -- if not, this is perhaps a new field, but
+ -- to be a new field this index must be include in a Set
+ if node.parent and node.parent.tag =="Set" then
+ -- in this case we create the new item and add it to the type
+ local set = node.parent
+ local item, string = getitem(set,nil, comment2apiobj,file)
+ -- add this item to the type, only if it has no parent and if this type does not contain already this field
+ if item and not item.parent and string and not type.fields[string[1]] then
+ type:addfield(item)
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+-- ----------------------------------------------------------
+-- create local vars, global vars and link them with their occurrences
+-- ----------------------------------------------------------
+local function createvardefinitions(_internalcontent,ast,file,comment2apiobj)
+ -- use bindings to get locals and globals definition
+ local locals, globals = bindings( ast )
+
+ -- create locals var
+ for binder, namesAndOccurrences in pairs(locals) do
+ for name, occurrences in pairs(namesAndOccurrences) do
+ -- get item, id
+ local _item, id = getitem(binder, name,comment2apiobj,file)
+ if id then
+ -- add definition as occurrence
+ -- we consider the identifier in the binder as an occurrence
+ local _identifierdef = idto_identifier[id]
+ if _identifierdef then
+ table.insert(_item.occurrences, _identifierdef)
+ _identifierdef.definition = _item
+ end
+
+ -- add occurrences
+ for _,occurrence in ipairs(occurrences) do
+ searchtypefield(occurrence.parent, _item,comment2apiobj,file)
+ local _identifier = idto_identifier[occurrence]
+ if _identifier then
+ table.insert(_item.occurrences, _identifier)
+ _identifier.definition = _item
+ end
+ end
+
+ -- add item to block
+ local _block = idto_block[id]
+ table.insert(_block.localvars,{item=_item,scope = {min=0,max=0}})
+ end
+ end
+ end
+
+ -- create globals var
+ for name, occurrences in pairs( globals ) do
+
+ -- get or create definition
+ local _item = file.globalvars[name]
+ local binder = occurrences[1].parent
+ if not _item then
+ -- a global declaration occurs only if the first occurrence is in the left part of a 'Set'
+ if binder and binder.tag == "Set" then
+ _item = getitem(binder, name,comment2apiobj,file)
+ end
+
+ -- if we find and item this is a global var declaration
+ if _item then
+ file:addglobalvar(_item)
+ else
+ -- else it is an unknown global var
+ _item = apimodel._item(name)
+ local _firstoccurrence = idto_identifier[occurrences[1]]
+ if _firstoccurrence then
+ _item.sourcerange.min = _firstoccurrence.sourcerange.min
+ _item.sourcerange.max = _firstoccurrence.sourcerange.max
+ end
+ table.insert(_internalcontent.unknownglobalvars,_item)
+ end
+ else
+ -- if the global var definition already exists, we just try to complete it
+ if binder then
+ match binder with
+ | `Set {ids, inits} ->
+ -- manage case only if there are 1 element in the Set
+ if #ids == 1 then
+ local currentid, currentinit = ids[1],inits[1]
+ -- ignore non Ids node and bad name
+ if currentid.tag == 'Id' and currentid[1] == name then
+ completeapidocitem(_item, name, currentinit,file,binder,comment2apiobj)
+
+ if currentid and currentid.lineinfo then
+ _item.sourcerange.min = currentid.lineinfo.first.offset
+ _item.sourcerange.max = currentid.lineinfo.last.offset
+ end
+ end
+ end
+ | _ ->
+ end
+ end
+ end
+
+ -- add occurrences
+ for _,occurence in ipairs(occurrences) do
+ local _identifier = idto_identifier[occurence]
+ searchtypefield(occurence.parent, _item,comment2apiobj,file)
+ if _identifier then
+ table.insert(_item.occurrences, _identifier)
+ _identifier.definition = _item
+ end
+ end
+ end
+end
+
+-- ----------------------------------------------------------
+-- add parent to all ast node
+-- ----------------------------------------------------------
+local function addparents(ast)
+ -- visitor function (down)
+ local function down (node,parent)
+ node.parent = parent
+ end
+
+ -- visit ast and build internal model
+ Q(ast):foreach(down,up)
+end
+
+-- ----------------------------------------------------------
+-- try to detect a module declaration from code
+-- ----------------------------------------------------------
+local function searchmodule(ast,file,comment2apiobj,modulename)
+ -- if the last statement is a return
+ if ast then
+ local laststatement = ast[#ast]
+ if laststatement and laststatement.tag == "Return" then
+ -- and if the first expression returned is an identifier.
+ local firstexpr = laststatement[1]
+ if firstexpr and firstexpr.tag == "Id" then
+ -- get identifier in internal model
+ local _identifier = idto_identifier [firstexpr]
+ -- the definition should be an inline type
+ if _identifier
+ and _identifier.definition
+ and _identifier.definition.type
+ and _identifier.definition.type.tag == "inlinetyperef"
+ and _identifier.definition.type.def.tag == "recordtypedef" then
+
+ --set modulename if needed
+ if not file.name then file.name = modulename end
+
+ -- create or merge type
+ local _type = _identifier.definition.type.def
+ _type.name = modulename
+
+ -- if file (module) has no documentation add item documentation to it
+ -- else add it to the type.
+ if not file.description or file.description == "" then
+ file.description = _identifier.definition.description
+ else
+ _type.description = _identifier.definition.description
+ end
+ _identifier.definition.description = ""
+ if not file.shortdescription or file.shortdescription == "" then
+ file.shortdescription = _identifier.definition.shortdescription
+ else
+ _type.shortdescription = _identifier.definition.shortdescription
+ end
+ _identifier.definition.shortdescription = ""
+
+ -- WORKAROUND FOR BUG 421622: [outline]module selection in outline does not select it in texteditor
+ --_type.sourcerange.min = _identifier.definition.sourcerange.min
+ --_type.sourcerange.max = _identifier.definition.sourcerange.max
+
+ -- merge the type with priority to documentation except for source range
+ file:mergetype(_type,false,true)
+
+ -- create return if needed
+ if not file.returns[1] then
+ file.returns[1] = apimodel._return()
+ file.returns[1].types = { apimodel._internaltyperef(modulename) }
+ end
+
+ -- change the type of the identifier
+ _identifier.definition.type = apimodel._internaltyperef(modulename)
+ end
+ end
+ end
+ end
+end
+
+-- ----------------------------------------------------------
+-- create the internalcontent from an ast metalua
+-- ----------------------------------------------------------
+function M.createinternalcontent (ast,file,comment2apiobj,modulename)
+ -- init cache
+ idto_block = {}
+ idto_identifier = {}
+ expreto_expression = {}
+ comment2apiobj = comment2apiobj or {}
+ file = file or apimodel._file()
+
+ -- execute code safely to be sure to clean cache correctly
+ local internalcontent
+ local ok, errmsg = pcall(function ()
+ -- add parent to all node
+ addparents(ast)
+
+ -- create block and expression node
+ internalcontent = createtreestructure(ast)
+
+ -- create Local vars, global vars and linked occurences (Items)
+ createvardefinitions(internalcontent,ast,file,comment2apiobj)
+
+ -- try to detect module information from code
+ local moduletyperef = file:moduletyperef()
+ if moduletyperef and moduletyperef.tag == "internaltyperef" then
+ modulename = moduletyperef.typename or modulename
+ end
+ if modulename then
+ searchmodule(ast,file,comment2apiobj,modulename)
+ end
+ end)
+
+ -- clean cache
+ idto_block = {}
+ idto_identifier = {}
+ expreto_expression = {}
+
+ -- if not ok raise an error
+ if not ok then error (errmsg) end
+
+ return internalcontent
+end
+
+return M
diff --git a/libraries/modelsbuilder/models/ldparser.lua b/libraries/modelsbuilder/models/ldparser.lua
index 4a969d1..ab947de 100644
--- a/libraries/modelsbuilder/models/ldparser.lua
+++ b/libraries/modelsbuilder/models/ldparser.lua
@@ -21,26 +21,26 @@
-- copy key and value from one table to an other
-- ----------------------------------------------------
local function copykey(tablefrom, tableto)
- for key, value in pairs(tablefrom) do
- if key ~= "lineinfos" then
- tableto[key] = value
- end
- end
+ for key, value in pairs(tablefrom) do
+ if key ~= "lineinfos" then
+ tableto[key] = value
+ end
+ end
end
-- ----------------------------------------------------
-- Handle keyword and identifiers as word
-- ----------------------------------------------------
local function parseword(lx)
- local word = lx :peek()
- local tag = word.tag
-
- if tag=='Keyword' or tag=='Id' then
- lx:next()
- return {tag='Word', lineinfo=word.lineinfo, word[1]}
- else
- return gg.parse_error(lx,'Id or Keyword expected')
- end
+ local word = lx :peek()
+ local tag = word.tag
+
+ if tag=='Keyword' or tag=='Id' then
+ lx:next()
+ return {tag='Word', lineinfo=word.lineinfo, word[1]}
+ else
+ return gg.parse_error(lx,'Id or Keyword expected')
+ end
end
-- ----------------------------------------------------
@@ -48,10 +48,10 @@
-- return a table {name, lineinfo)
-- ----------------------------------------------------
local idparser = gg.sequence({
- builder = function (result)
- return { name = result[1][1] }
- end,
- parseword
+ builder = function (result)
+ return { name = result[1][1] }
+ end,
+ parseword
})
-- ----------------------------------------------------
@@ -59,15 +59,15 @@
-- return a table {name, lineinfo)
-- ----------------------------------------------------
local modulenameparser = gg.list({
- builder = function (result)
- local ids = {}
- for i, id in ipairs(result) do
- table.insert(ids,id.name)
- end
- return {name = table.concat(ids,".")}
- end,
- primary = idparser,
- separators = '.'
+ builder = function (result)
+ local ids = {}
+ for i, id in ipairs(result) do
+ table.insert(ids,id.name)
+ end
+ return {name = table.concat(ids,".")}
+ end,
+ primary = idparser,
+ separators = '.'
})
-- ----------------------------------------------------
-- parse a typename (id.)?id
@@ -99,10 +99,10 @@
-- parse an external type ref
-- ----------------------------------------------------
local externaltyperefparser = gg.sequence({
- builder = function(result)
- return {tag = "typeref",module=result[1].name,type=result[2].name}
- end,
- modulenameparser,"#", typenameparser
+ builder = function(result)
+ return {tag = "typeref",module=result[1].name,type=result[2].name}
+ end,
+ modulenameparser,"#", typenameparser
})
-- ----------------------------------------------------
@@ -115,20 +115,20 @@
-- parse a structure type, without the first #
-- ----------------------------------------------------
local sharplesslisttyperefparser = gg.sequence({
- builder = function(result)
- return {tag = "typeref", type="list", valuetype=result[1]}
- end,
- "list","<", typerefparser, ">"
+ builder = function(result)
+ return {tag = "typeref", type="list", valuetype=result[1]}
+ end,
+ "list","<", typerefparser, ">"
})
-- ----------------------------------------------------
-- parse a map type, without the first #
-- ----------------------------------------------------
local sharplessmaptyperefparser = gg.sequence({
- builder = function(result)
- return {tag = "typeref", type="map", keytype=result[1], valuetype=result[2]}
- end,
- "map","<", typerefparser, ",", typerefparser, ">"
+ builder = function(result)
+ return {tag = "typeref", type="map", keytype=result[1], valuetype=result[2]}
+ end,
+ "map","<", typerefparser, ",", typerefparser, ">"
})
-- ----------------------------------------------------
@@ -151,9 +151,9 @@
-- ----------------------------------------------------
-- parse a typeref
-- ----------------------------------------------------
-_typerefparser = gg.multisequence({
+_typerefparser = gg.multisequence({
sharptyperefparser,
- externaltyperefparser
+ externaltyperefparser
})
-- ----------------------------------------------------
@@ -161,8 +161,8 @@
-- return a list of table {name, lineinfo)
-- ----------------------------------------------------
local typereflistparser = gg.list({
- primary = typerefparser,
- separators = ','
+ primary = typerefparser,
+ separators = ','
})
-- ----------------------------------------------------
@@ -170,10 +170,10 @@
-- TODO support more than one modifier
-- ----------------------------------------------------
local modifiersparser = gg.sequence({
- builder = function(result)
- return {[result[1].name]=result[2]}
- end,
- "[", idparser , "=" , internaltyperefparser , "]"
+ builder = function(result)
+ return {[result[1].name]=result[2]}
+ end,
+ "[", idparser , "=" , internaltyperefparser , "]"
})
-- ----------------------------------------------------
@@ -232,35 +232,35 @@
-- parse a return tag
-- ----------------------------------------------------
local returnparsers = {
- -- full parser
- gg.sequence({
- builder = function (result)
- return { types= result[1]}
- end,
- '@','return', typereflistparser
- }),
- -- parser without typerefs
- gg.sequence({
- builder = function (result)
- return { types = {}}
- end,
- '@','return'
- })
+ -- full parser
+ gg.sequence({
+ builder = function (result)
+ return { types= result[1]}
+ end,
+ '@','return', typereflistparser
+ }),
+ -- parser without typerefs
+ gg.sequence({
+ builder = function (result)
+ return { types = {}}
+ end,
+ '@','return'
+ })
}
-- ----------------------------------------------------
-- parse a param tag
-- ----------------------------------------------------
local paramparsers = {
- -- full parser
- gg.sequence({
- builder = function (result)
- return { name = result[2].name, type = result[1]}
- end,
- '@','param', typerefparser, idparser
- }),
-
- -- reject the case were only a type without name
+ -- full parser
+ gg.sequence({
+ builder = function (result)
+ return { name = result[2].name, type = result[1]}
+ end,
+ '@','param', typerefparser, idparser
+ }),
+
+ -- reject the case where only a type without name
gg.sequence({
builder = function (result)
return {tag="Error"}
@@ -268,39 +268,39 @@
'@','param', '#'
}),
- -- parser without type
- gg.sequence({
- builder = function (result)
- return { name = result[1].name}
- end,
- '@','param', idparser
- }),
+ -- parser without type
+ gg.sequence({
+ builder = function (result)
+ return { name = result[1].name}
+ end,
+ '@','param', idparser
+ }),
- -- Parser for `Dots
- gg.sequence({
- builder = function (result)
- return { name = '...' }
- end,
- '@','param', '...'
- }),
+ -- Parser for `Dots
+ gg.sequence({
+ builder = function (result)
+ return { name = '...' }
+ end,
+ '@','param', '...'
+ }),
}
-- ----------------------------------------------------
-- parse a field tag
-- ----------------------------------------------------
local fieldparsers = {
- -- full parser
- gg.sequence({
- builder = function (result)
- local tag = {}
- copykey(result[1],tag)
- tag.type = result[2]
- tag.name = result[3].name
- return tag
- end,
- '@','field', modifiersparser, typerefparser, idparser
- }),
-
- -- reject the case where the type name is empty
+ -- full parser
+ gg.sequence({
+ builder = function (result)
+ local tag = {}
+ copykey(result[1],tag)
+ tag.type = result[2]
+ tag.name = result[3].name
+ return tag
+ end,
+ '@','field', modifiersparser, typerefparser, idparser
+ }),
+
+ -- reject the case where the type name is empty
gg.sequence({
builder = function (result)
return {tag = "Error"}
@@ -308,77 +308,77 @@
'@','field',modifiersparser, '#'
}),
- -- parser without name
- gg.sequence({
- builder = function (result)
- local tag = {}
- copykey(result[1],tag)
- tag.type = result[2]
- return tag
- end,
- '@','field', modifiersparser, typerefparser
- }),
-
- -- parser without type
- gg.sequence({
- builder = function (result)
- local tag = {}
- copykey(result[1],tag)
- tag.name = result[2].name
- return tag
- end,
- '@','field', modifiersparser, idparser
- }),
+ -- parser without name
+ gg.sequence({
+ builder = function (result)
+ local tag = {}
+ copykey(result[1],tag)
+ tag.type = result[2]
+ return tag
+ end,
+ '@','field', modifiersparser, typerefparser
+ }),
- -- parser without type and name
- gg.sequence({
- builder = function (result)
- local tag = {}
- copykey(result[1],tag)
- return tag
- end,
- '@','field', modifiersparser
- }),
-
- -- parser without modifiers
- gg.sequence({
- builder = function (result)
- return { name = result[2].name, type = result[1]}
- end,
- '@','field', typerefparser, idparser
- }),
+ -- parser without type
+ gg.sequence({
+ builder = function (result)
+ local tag = {}
+ copykey(result[1],tag)
+ tag.name = result[2].name
+ return tag
+ end,
+ '@','field', modifiersparser, idparser
+ }),
- -- parser without modifiers and name
- gg.sequence({
- builder = function (result)
- return {type = result[1]}
- end,
- '@','field', typerefparser
- }),
-
- -- reject the case where the type name is empty
+ -- parser without type and name
+ gg.sequence({
+ builder = function (result)
+ local tag = {}
+ copykey(result[1],tag)
+ return tag
+ end,
+ '@','field', modifiersparser
+ }),
+
+ -- parser without modifiers
+ gg.sequence({
+ builder = function (result)
+ return { name = result[2].name, type = result[1]}
+ end,
+ '@','field', typerefparser, idparser
+ }),
+
+ -- parser without modifiers and name
+ gg.sequence({
+ builder = function (result)
+ return {type = result[1]}
+ end,
+ '@','field', typerefparser
+ }),
+
+ -- reject the case where the type name is empty
gg.sequence({
builder = function (result)
return {tag = "Error"}
end,
'@','field', '#'
}),
-
- -- parser without type and modifiers
- gg.sequence({
- builder = function (result)
- return { name = result[1].name}
- end,
- '@','field', idparser
- }),
-
- -- parser with nothing
- gg.sequence({
- builder = function (result)
- return {}
- end,
- '@','field'
- })
+
+ -- parser without type and modifiers
+ gg.sequence({
+ builder = function (result)
+ return { name = result[1].name}
+ end,
+ '@','field', idparser
+ }),
+
+ -- parser with nothing
+ gg.sequence({
+ builder = function (result)
+ return {}
+ end,
+ '@','field'
+ })
}
-- ----------------------------------------------------
@@ -386,136 +386,136 @@
-- TODO use a more generic way to parse modifier !
-- ----------------------------------------------------
local functionparsers = {
- -- full parser
- gg.sequence({
- builder = function (result)
- local tag = {}
- copykey(result[1],tag)
- tag.name = result[2].name
- return tag
- end,
- '@','function', modifiersparser, idparser
- }),
-
- -- parser without name
- gg.sequence({
- builder = function (result)
- local tag = {}
- copykey(result[1],tag)
- return tag
- end,
- '@','function', modifiersparser
- }),
-
- -- parser without modifier
- gg.sequence({
- builder = function (result)
- local tag = {}
- tag.name = result[1].name
- return tag
- end,
- '@','function', idparser
- }),
-
- -- empty parser
- gg.sequence({
- builder = function (result)
- return {}
- end,
- '@','function'
- })
+ -- full parser
+ gg.sequence({
+ builder = function (result)
+ local tag = {}
+ copykey(result[1],tag)
+ tag.name = result[2].name
+ return tag
+ end,
+ '@','function', modifiersparser, idparser
+ }),
+
+ -- parser without name
+ gg.sequence({
+ builder = function (result)
+ local tag = {}
+ copykey(result[1],tag)
+ return tag
+ end,
+ '@','function', modifiersparser
+ }),
+
+ -- parser without modifier
+ gg.sequence({
+ builder = function (result)
+ local tag = {}
+ tag.name = result[1].name
+ return tag
+ end,
+ '@','function', idparser
+ }),
+
+ -- empty parser
+ gg.sequence({
+ builder = function (result)
+ return {}
+ end,
+ '@','function'
+ })
}
-- ----------------------------------------------------
-- parse a type tag
-- ----------------------------------------------------
local typeparsers = {
- -- full parser
- gg.sequence({
- builder = function (result)
- return { name = result[1].name}
- end,
- '@','type',typenameparser
- }),
- -- parser without name
- gg.sequence({
- builder = function (result)
- return {}
- end,
- '@','type'
- })
+ -- full parser
+ gg.sequence({
+ builder = function (result)
+ return { name = result[1].name}
+ end,
+ '@','type',typenameparser
+ }),
+ -- parser without name
+ gg.sequence({
+ builder = function (result)
+ return {}
+ end,
+ '@','type'
+ })
}
-- ----------------------------------------------------
-- parse a module tag
-- ----------------------------------------------------
local moduleparsers = {
- -- full parser
- gg.sequence({
- builder = function (result)
- return { name = result[1].name }
- end,
- '@','module', modulenameparser
- }),
- -- parser without name
- gg.sequence({
- builder = function (result)
- return {}
- end,
- '@','module'
- })
+ -- full parser
+ gg.sequence({
+ builder = function (result)
+ return { name = result[1].name }
+ end,
+ '@','module', modulenameparser
+ }),
+ -- parser without name
+ gg.sequence({
+ builder = function (result)
+ return {}
+ end,
+ '@','module'
+ })
}
-- ----------------------------------------------------
-- parse a third tag
-- ----------------------------------------------------
local thirdtagsparser = gg.sequence({
- builder = function (result)
- return { name = result[1][1] }
- end,
- '@', mlp.id
+ builder = function (result)
+ return { name = result[1][1] }
+ end,
+ '@', mlp.id
})
-- ----------------------------------------------------
-- init parser
-- ----------------------------------------------------
local function initparser()
- -- register parsers
- -- each tag name has several parsers
- registeredparsers = {
- ["module"] = moduleparsers,
- ["return"] = returnparsers,
- ["type"] = typeparsers,
- ["field"] = fieldparsers,
- ["function"] = functionparsers,
- ["param"] = paramparsers,
- ["extends"] = extendsparsers,
- ["list"] = listparsers,
- ["map"] = mapparsers,
- ["callof"] = callofparsers
- }
+ -- register parsers
+ -- each tag name has several parsers
+ registeredparsers = {
+ ["module"] = moduleparsers,
+ ["return"] = returnparsers,
+ ["type"] = typeparsers,
+ ["field"] = fieldparsers,
+ ["function"] = functionparsers,
+ ["param"] = paramparsers,
+ ["extends"] = extendsparsers,
+ ["list"] = listparsers,
+ ["map"] = mapparsers,
+ ["callof"] = callofparsers
+ }
- -- create lexer used for parsing
- lx = lexer.lexer:clone()
- lx.extractors = {
- -- "extract_long_comment",
- -- "extract_short_comment",
- -- "extract_long_string",
- "extract_short_string",
- "extract_word",
- "extract_number",
- "extract_symbol"
- }
+ -- create lexer used for parsing
+ lx = lexer.lexer:clone()
+ lx.extractors = {
+ -- "extract_long_comment",
+ -- "extract_short_comment",
+ -- "extract_long_string",
+ "extract_short_string",
+ "extract_word",
+ "extract_number",
+ "extract_symbol"
+ }
- -- Add dots as keyword
- local tagnames = { '...' }
+ -- Add dots as keyword
+ local tagnames = { '...' }
- -- Add tag names as key word
- for tagname, _ in pairs(registeredparsers) do
- table.insert(tagnames,tagname)
- end
- lx:add(tagnames)
+ -- Add tag names as key word
+ for tagname, _ in pairs(registeredparsers) do
+ table.insert(tagnames,tagname)
+ end
+ lx:add(tagnames)
- return lx, parsers
+ return lx, parsers
end
initparser()
@@ -525,40 +525,40 @@
-- the goal is to fix the indentation problems
-- ----------------------------------------------------
local function getstringtoremove (stringcomment,commentstart)
- local _,_,capture = string.find(stringcomment,"\n?([ \t]*)@[^{]+",commentstart)
- if not capture then
- _,_,capture = string.find(stringcomment,"^([ \t]*)",commentstart)
- end
- capture = string.gsub(capture,"(.)","%1?")
- return capture
+ local _,_,capture = string.find(stringcomment,"\n?([ \t]*)@[^{]+",commentstart)
+ if not capture then
+ _,_,capture = string.find(stringcomment,"^([ \t]*)",commentstart)
+ end
+ capture = string.gsub(capture,"(.)","%1?")
+ return capture
end
-- ----------------------------------------------------
-- parse comment tag partition and return table structure
-- ----------------------------------------------------
local function parsetag(part)
- if part.comment:find("^@") then
- -- check if the part start by a supported tag
- for tagname,parsers in pairs(registeredparsers) do
- if (part.comment:find("^@"..tagname)) then
- -- try the registered parsers for this tag
- local result
- for i, parser in ipairs(parsers) do
- local valid, tag = pcall(parser, lx:newstream(part.comment, tagname .. 'tag lexer'))
- if valid then
- -- add tagname
- tag.tagname = tagname
+ if part.comment:find("^@") then
+ -- check if the part starts with a supported tag
+ for tagname,parsers in pairs(registeredparsers) do
+ if (part.comment:find("^@"..tagname)) then
+ -- try the registered parsers for this tag
+ local result
+ for i, parser in ipairs(parsers) do
+ local valid, tag = pcall(parser, lx:newstream(part.comment, tagname .. 'tag lexer'))
+ if valid then
+ -- add tagname
+ tag.tagname = tagname
- -- add description
- local endoffset = tag.lineinfo.last.offset
- tag.description = part.comment:sub(endoffset+2,-1)
- return tag
- end
- end
- end
- end
- end
- return nil
+ -- add description
+ local endoffset = tag.lineinfo.last.offset
+ tag.description = part.comment:sub(endoffset+2,-1)
+ return tag
+ end
+ end
+ end
+ end
+ end
+ return nil
end
-- ----------------------------------------------------
@@ -569,23 +569,23 @@
-- ----------------------------------------------------
local function parsethirdtag( part )
- -- Check it there is someting to process
- if not part.comment:find("^@") then
- return nil, 'No tag to parse'
- end
+ -- Check if there is something to process
+ if not part.comment:find("^@") then
+ return nil, 'No tag to parse'
+ end
- -- Apply parser
- local status, parsedtag = pcall(thirdtagsparser, lx:newstream(part.comment, 'Third party tag lexer'))
- if not status then
- return nil, "Unable to parse given string."
- end
+ -- Apply parser
+ local status, parsedtag = pcall(thirdtagsparser, lx:newstream(part.comment, 'Third party tag lexer'))
+ if not status then
+ return nil, "Unable to parse given string."
+ end
- -- Retrieve description
- local endoffset = parsedtag.lineinfo.last.offset
- local tag = {
- description = part.comment:sub(endoffset+2,-1)
- }
- return parsedtag.name, tag
+ -- Retrieve description
+ local endoffset = parsedtag.lineinfo.last.offset
+ local tag = {
+ description = part.comment:sub(endoffset+2,-1)
+ }
+ return parsedtag.name, tag
end
-- ---------------------------------------------------------
@@ -595,33 +595,33 @@
-- the others are the part from a tag to the next one
-- ----------------------------------------------------
local function split(stringcomment,commentstart)
- local partstart = commentstart
- local result = {}
+ local partstart = commentstart
+ local result = {}
- -- manage case where the comment start by @
- -- (we must ignore the inline see tag @{..})
- local at_startoffset, at_endoffset = stringcomment:find("^[ \t]*@[^{]",partstart)
- if at_endoffset then
- partstart = at_endoffset-1 -- we start before the @ and the non '{' character
- end
+ -- manage case where the comment start by @
+ -- (we must ignore the inline see tag @{..})
+ local at_startoffset, at_endoffset = stringcomment:find("^[ \t]*@[^{]",partstart)
+ if at_endoffset then
+ partstart = at_endoffset-1 -- we start before the @ and the non '{' character
+ end
- -- split comment
- -- (we must ignore the inline see tag @{..})
- repeat
- at_startoffset, at_endoffset = stringcomment:find("\n[ \t]*@[^{]",partstart)
- local partend
- if at_startoffset then
- partend= at_startoffset-1 -- the end is before the separator pattern (just before the \n)
- else
- partend = #stringcomment -- we don't find any pattern so the end is the end of the string
- end
- table.insert(result, { comment = stringcomment:sub (partstart,partend) ,
- offset = partstart})
- if at_endoffset then
- partstart = at_endoffset-1 -- the new start is befire the @ and the non { char
- end
- until not at_endoffset
- return result
+ -- split comment
+ -- (we must ignore the inline see tag @{..})
+ repeat
+ at_startoffset, at_endoffset = stringcomment:find("\n[ \t]*@[^{]",partstart)
+ local partend
+ if at_startoffset then
+ partend= at_startoffset-1 -- the end is before the separator pattern (just before the \n)
+ else
+ partend = #stringcomment -- we don't find any pattern so the end is the end of the string
+ end
+ table.insert(result, { comment = stringcomment:sub (partstart,partend) ,
+ offset = partstart})
+ if at_endoffset then
+ partstart = at_endoffset-1 -- the new start is before the @ and the non { char
+ end
+ until not at_endoffset
+ return result
end
@@ -630,120 +630,120 @@
-- ----------------------------------------------------
function M.parse(stringcomment)
- local _comment = {description="", shortdescription=""}
+ local _comment = {description="", shortdescription=""}
- -- clean windows carriage return
- stringcomment = string.gsub(stringcomment,"\r\n","\n")
+ -- clean windows carriage return
+ stringcomment = string.gsub(stringcomment,"\r\n","\n")
- -- check if it's a ld comment
- -- get the begin of the comment
- -- ============================
- if not stringcomment:find("^-") then
- -- if this comment don't start by -, we will not handle it.
- return nil
- end
+ -- check if it's a ld comment
+ -- get the begin of the comment
+ -- ============================
+ if not stringcomment:find("^-") then
+ -- if this comment doesn't start by -, we will not handle it.
+ return nil
+ end
- -- retrieve the real start
- local commentstart = 2 --after the first hyphen
- -- if the first line is an empty comment line with at least 3 hyphens we ignore it
- local _ , endoffset = stringcomment:find("^-+[ \t]*\n")
- if endoffset then
- commentstart = endoffset+1
- end
+ -- retrieve the real start
+ local commentstart = 2 --after the first hyphen
+ -- if the first line is an empty comment line with at least 3 hyphens we ignore it
+ local _ , endoffset = stringcomment:find("^-+[ \t]*\n")
+ if endoffset then
+ commentstart = endoffset+1
+ end
- -- clean comments
- -- ===================
- -- remove line of "-"
- stringcomment = string.sub(stringcomment,commentstart)
- -- clean indentation
- local pattern = getstringtoremove (stringcomment,1)
- stringcomment = string.gsub(stringcomment,"^"..pattern,"")
- stringcomment = string.gsub(stringcomment,"\n"..pattern,"\n")
+ -- clean comments
+ -- ===================
+ -- remove line of "-"
+ stringcomment = string.sub(stringcomment,commentstart)
+ -- clean indentation
+ local pattern = getstringtoremove (stringcomment,1)
+ stringcomment = string.gsub(stringcomment,"^"..pattern,"")
+ stringcomment = string.gsub(stringcomment,"\n"..pattern,"\n")
- -- split comment part
- -- ====================
- local commentparts = split(stringcomment, 1)
+ -- split comment part
+ -- ====================
+ local commentparts = split(stringcomment, 1)
- -- Extract descriptions
- -- ====================
- local firstpart = commentparts[1].comment
- if firstpart:find("^[^@]") or firstpart:find("^@{") then
- -- if the comment part don't start by @
- -- it's the part which contains descriptions
- -- (there are an exception for the in-line see tag @{..})
- local shortdescription, description = string.match(firstpart,'^(.-[.?])(%s.+)')
- -- store description
- if shortdescription then
- _comment.shortdescription = shortdescription
- -- clean description
- -- remove always the first space character
- -- (this manage the case short and long description is on the same line)
- description = string.gsub(description, "^[ \t]","")
- -- if first line is only an empty string remove it
- description = string.gsub(description, "^[ \t]*\n","")
- _comment.description = description
- else
- _comment.shortdescription = firstpart
- _comment.description = ""
- end
- end
+ -- Extract descriptions
+ -- ====================
+ local firstpart = commentparts[1].comment
+ if firstpart:find("^[^@]") or firstpart:find("^@{") then
+ -- if the comment part doesn't start by @
+ -- it's the part which contains descriptions
+ -- (there is an exception for the in-line see tag @{..})
+ local shortdescription, description = string.match(firstpart,'^(.-[.?])(%s.+)')
+ -- store description
+ if shortdescription then
+ _comment.shortdescription = shortdescription
+ -- clean description
+ -- remove always the first space character
+ -- (this manage the case short and long description is on the same line)
+ description = string.gsub(description, "^[ \t]","")
+ -- if first line is only an empty string remove it
+ description = string.gsub(description, "^[ \t]*\n","")
+ _comment.description = description
+ else
+ _comment.shortdescription = firstpart
+ _comment.description = ""
+ end
+ end
- -- Extract tags
- -- ===================
- -- Parse regular tags
- local tag
- for i, part in ipairs(commentparts) do
- tag = parsetag(part)
- --if it's a supported tag (so tag is not nil, it's a table)
- if tag then
- if not _comment.tags then _comment.tags = {} end
- if not _comment.tags[tag.tagname] then
- _comment.tags[tag.tagname] = {}
- end
- table.insert(_comment.tags[tag.tagname], tag)
- else
+ -- Extract tags
+ -- ===================
+ -- Parse regular tags
+ local tag
+ for i, part in ipairs(commentparts) do
+ tag = parsetag(part)
+ --if it's a supported tag (so tag is not nil, it's a table)
+ if tag then
+ if not _comment.tags then _comment.tags = {} end
+ if not _comment.tags[tag.tagname] then
+ _comment.tags[tag.tagname] = {}
+ end
+ table.insert(_comment.tags[tag.tagname], tag)
+ else
- -- Try user defined tags, so far they will look like
- -- @identifier description
- local tagname, thirdtag = parsethirdtag( part )
- if tagname then
- --
- -- Append found tag
- --
- local reservedname = 'unknowntags'
- if not _comment.unknowntags then
- _comment.unknowntags = {}
- end
+ -- Try user defined tags, so far they will look like
+ -- @identifier description
+ local tagname, thirdtag = parsethirdtag( part )
+ if tagname then
+ --
+ -- Append found tag
+ --
+ local reservedname = 'unknowntags'
+ if not _comment.unknowntags then
+ _comment.unknowntags = {}
+ end
- -- Create specific section for parsed tag
- if not _comment.unknowntags[tagname] then
- _comment.unknowntags[tagname] = {}
- end
- -- Append to specific section
- table.insert(_comment.unknowntags[tagname], thirdtag)
- end
- end
- end
- return _comment
+ -- Create specific section for parsed tag
+ if not _comment.unknowntags[tagname] then
+ _comment.unknowntags[tagname] = {}
+ end
+ -- Append to specific section
+ table.insert(_comment.unknowntags[tagname], thirdtag)
+ end
+ end
+ end
+ return _comment
end
function M.parseinlinecomment(stringcomment)
- --TODO this code is use to activate typage only on --- comments. (deactivate for now)
- -- if not stringcomment or not stringcomment:find("^-") then
- -- -- if this comment don't start by -, we will not handle it.
- -- return nil
- -- end
- -- -- remove the first '-'
- -- stringcomment = string.sub(stringcomment,2)
- -- print (stringcomment)
- -- io.flush()
- local valid, parsedtag = pcall(typerefparser, lx:newstream(stringcomment, 'typeref parser'))
- if valid then
- local endoffset = parsedtag.lineinfo.last.offset
- parsedtag.description = stringcomment:sub(endoffset+2,-1)
- return parsedtag
- end
+ --TODO this code is used to activate typing only on --- comments. (deactivated for now)
+ -- if not stringcomment or not stringcomment:find("^-") then
+ -- -- if this comment don't start by -, we will not handle it.
+ -- return nil
+ -- end
+ -- -- remove the first '-'
+ -- stringcomment = string.sub(stringcomment,2)
+ -- print (stringcomment)
+ -- io.flush()
+ local valid, parsedtag = pcall(typerefparser, lx:newstream(stringcomment, 'typeref parser'))
+ if valid then
+ local endoffset = parsedtag.lineinfo.last.offset
+ parsedtag.description = stringcomment:sub(endoffset+2,-1)
+ return parsedtag
+ end
end
return M
diff --git a/libraries/templateengine/templateengine.lua b/libraries/templateengine/templateengine.lua
index 63fde13..e1256f0 100644
--- a/libraries/templateengine/templateengine.lua
+++ b/libraries/templateengine/templateengine.lua
@@ -21,94 +21,94 @@
-- apply template to the given element
function M.applytemplate(elem, ident, templatetype,...)
- -- define environment
- local env = M.getenv(elem, ident,...)
+ -- define environment
+ local env = M.getenv(elem, ident,...)
- -- load template
- local template = M.gettemplate(elem,templatetype)
- if not template then
- templatetype = templatetype and string.format(' "%s"', templatetype) or ''
- local elementname = string.format(' for %s', elem.tag or 'untagged element')
- error(string.format('Unable to load %s template %s', templatetype, elementname))
- end
+ -- load template
+ local template = M.gettemplate(elem,templatetype)
+ if not template then
+ templatetype = templatetype and string.format(' "%s"', templatetype) or ''
+ local elementname = string.format(' for %s', elem.tag or 'untagged element')
+ error(string.format('Unable to load %s template %s', templatetype, elementname))
+ end
- -- apply template
- local str, err = pltemplate.substitute(template, env)
+ -- apply template
+ local str, err = pltemplate.substitute(template, env)
- --manage errors
- if not str then
- local templateerror = templatetype and string.format(' parsing "%s" template ', templatetype) or ''
- error(string.format('An error occured%s for "%s"\n%s',templateerror, elem.tag, err))
- end
- return str
+ --manage errors
+ if not str then
+ local templateerror = templatetype and string.format(' parsing "%s" template ', templatetype) or ''
+ error(string.format('An error occured%s for "%s"\n%s',templateerror, elem.tag, err))
+ end
+ return str
end
-- get the a new environment for this element
function M.getenv(elem, ident,...)
- local currentenv ={}
- for k,v in pairs(M.env) do currentenv[k] = v end
- if elem and elem.tag then
- currentenv['_'..elem.tag]= elem
- end
- currentenv['i']= ident or 1
- currentenv['templateparams']= {...}
- return currentenv
+ local currentenv ={}
+ for k,v in pairs(M.env) do currentenv[k] = v end
+ if elem and elem.tag then
+ currentenv['_'..elem.tag]= elem
+ end
+ currentenv['i']= ident or 1
+ currentenv['templateparams']= {...}
+ return currentenv
end
-- get the template for this element
function M.gettemplate(elem,templatetype)
- local tag = elem and elem.tag
- if tag then
- if templatetype then
- return require ("template." .. templatetype.. "." .. tag)
- else
- return require ("template." .. tag)
- end
- end
+ local tag = elem and elem.tag
+ if tag then
+ if templatetype then
+ return require ("template." .. templatetype.. "." .. tag)
+ else
+ return require ("template." .. tag)
+ end
+ end
end
---
-- Allow user to format text in descriptions.
-- Default implementation replaces @{---} tags with links and apply markdown.
--- @return #string
+-- @return #string
local function format(string)
- -- Allow to replace encountered tags with valid links
- local replace = function(found)
- local apiobj = M.env.getelement(found)
- if apiobj then
- return M.env.fulllinkto(apiobj)
- end
- return found
- end
- string = string:gsub('@{%s*(.-)%s*}', replace)
- return M.env.markdown( string )
+ -- Allow to replace encountered tags with valid links
+ local replace = function(found)
+ local apiobj = M.env.getelement(found)
+ if apiobj then
+ return M.env.fulllinkto(apiobj)
+ end
+ return found
+ end
+ string = string:gsub('@{%s*(.-)%s*}', replace)
+ return M.env.markdown( string )
end
---
-- Provide a full link to an element using `prettyname` and `linkto`.
-- Default implementation is for HTML.
local function fulllinkto(o,...)
- local ref = M.env.linkto(o,...)
- local name = M.env.prettyname(o,...)
- if not ref then
- return name
- end
- return string.format('<a href="%s">%s</a>', ref, name)
+ local ref = M.env.linkto(o,...)
+ local name = M.env.prettyname(o,...)
+ if not ref then
+ return name
+ end
+ return string.format('<a href="%s">%s</a>', ref, name)
end
--
-- Define default template environnement
--
local defaultenv = {
- table = table,
- ipairs = ipairs,
- pairs = pairs,
- markdown = markdown,
- applytemplate = M.applytemplate,
- format = format,
- linkto = function(str) return str end,
- fulllinkto = fulllinkto,
- prettyname = function(s) return s end,
- getelement = function(s) return nil end
+ table = table,
+ ipairs = ipairs,
+ pairs = pairs,
+ markdown = markdown,
+ applytemplate = M.applytemplate,
+ format = format,
+ linkto = function(str) return str end,
+ fulllinkto = fulllinkto,
+ prettyname = function(s) return s end,
+ getelement = function(s) return nil end
}
-- this is the global env accessible in the templates
diff --git a/plugins/org.eclipse.ldt.debug.core/script/internal/debugger/transport/java.lua b/plugins/org.eclipse.ldt.debug.core/script/internal/debugger/transport/java.lua
index 6fdb0fa..4ee272f 100644
--- a/plugins/org.eclipse.ldt.debug.core/script/internal/debugger/transport/java.lua
+++ b/plugins/org.eclipse.ldt.debug.core/script/internal/debugger/transport/java.lua
@@ -1,168 +1,168 @@
--------------------------------------------------------------------------------
--- Copyright (c) 2011-2012 Sierra Wireless and others.
--- All rights reserved. This program and the accompanying materials
--- are made available under the terms of the Eclipse Public License v1.0
--- which accompanies this distribution, and is available at
--- http://www.eclipse.org/legal/epl-v10.html
---
--- Contributors:
--- Sierra Wireless - initial API and implementation
--------------------------------------------------------------------------------
-
--------------------------------------------------------------------------------
--- JavaSocket backend for DBGP debugger.
--- @module debugger.transport.java
-
--- the main part of code is defined in the TransportLayerModule.java class
--- this file is just a wrapper to workaround some problems and be more compliant with defined API.
-
-local javamodule = require ("debugger.transport.javasocket")
-local M = {}
-
-
---------------------------------------------------------------------------------
--- Client socket to be connected to DBGP server.
--- @type socket
-local socket = {}
-
---------------------------------------------------------------------------------
--- Connect socket to given server.
--- @function [parent=#socket] connect
--- @param self
--- @param #string host name or ip address.
--- @param #number port number.
--- @return true on success.
--- @return nil, error message on failure.
-function socket:connect (host,port)
- local status, res = pcall(function() return self.wrapper:connect(host,port) end)
- if status then
- return true
- else
- return nil, res
- end
-end
-
---------------------------------------------------------------------------------
--- Reads some data from socket.
--- @function [parent=#socket] receive
--- @param self
--- @param #number number of bytes to read.
--- @return #string read data on success.
--- @return nil, error message, partial buffer on failure.
-function socket:receive (number)
-
- local function receive()
- local res = {}
- for i=1,number do
- local data = self.wrapper:receive()
- if (data =="") then
- res[#res+1] = "\000"
- else
- res[#res+1] = data
- end
- end
- return table.concat(res,"")
- end
-
- local status, res = pcall(receive)
- if status then
- return res
- else
- return nil, res
- end
-end
-
---------------------------------------------------------------------------------
--- Send data to server.
--- @function [parent=#socket] send
--- @param self
--- @param #string buffer to send.
--- @return true on success.
--- @return nil, error message on failure.
-function socket:send (buffer)
- return pcall(function () string.gsub(buffer,"([^%z]+)",
- function (data)
- self.wrapper:send(data)
- self.wrapper:send("")
- end)
- end)
-end
-
---------------------------------------------------------------------------------
--- Set socket blocking or not.
---
--- The name is borrowed from LuaSocket, but the actual usage is just fully
--- blocking or non-blocking.
---
--- @function [parent=#socket] settimeout
--- @param self
--- @param nil to set non-blocking, any other value to set blocking.
--- @return true on success.
--- @return nil, error message on failure.
-
-function socket:settimeout (nonblocking)
- return pcall(function() return self.wrapper:settimeout(nonblocking) end)
-end
-
---------------------------------------------------------------------------------
--- Closes the socket.
--- @function [parent=#socket] close
--- @param self
--- @return true.
-function socket:close ()
- return pcall(function() return self.wrapper:close() end)
-end
-
---------------------------------------------------------------------------------
--- Create a new TCP socket, not yet connected to anything.
--- @function [parent=#debugger.transport.java] create
--- @return #socket the created socket
-function M.create ()
- local status, res = pcall(function() return javamodule.create() end)
- if status then
- local t = setmetatable({wrapper = res},{__index = socket})
- return t
- else
- return nil, res
- end
-end
-
---------------------------------------------------------------------------------
--- Wait for some time. Minimum precision is not defined strictly but should be
--- a millisecond resolution at least.
--- @function [parent=#debugger.transport.java] sleep
--- @param #number time amount of time to wait in seconds (decimal numbers
--- allowed).
-function M.sleep (time)
- javamodule.sleep(time)
-end
-
---------------------------------------------------------------------------------
--- Encode a string to its Base64 representation.
--- @function [parent=#debugger.transport.java] rawb64
--- @param #string input content to encode.
--- @return #string Base64 encoded string.
-function M.rawb64 (input)
- return javamodule.rawb64(input)
-end
-
---------------------------------------------------------------------------------
--- Encode a string to its Base64 representation with lines wrapped at 76
--- characters.
--- @function [parent=#debugger.transport.java] b64
--- @param #string input content to encode.
--- @return #string Base64 encoded string.
-function M.b64 (input)
- return javamodule.b64(input)
-end
-
---------------------------------------------------------------------------------
--- Decode a Base64 encoded string.
--- @function [parent=#debugger.transport.java] unb64
--- @param #string input Base64 encoded string.
--- @return #string decoded string.
-function M.unb64 (input)
- return javamodule.unb64(input)
-end
-
-return M
+-------------------------------------------------------------------------------
+-- Copyright (c) 2011-2012 Sierra Wireless and others.
+-- All rights reserved. This program and the accompanying materials
+-- are made available under the terms of the Eclipse Public License v1.0
+-- which accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- Contributors:
+-- Sierra Wireless - initial API and implementation
+-------------------------------------------------------------------------------
+
+-------------------------------------------------------------------------------
+-- JavaSocket backend for DBGP debugger.
+-- @module debugger.transport.java
+
+-- the main part of code is defined in the TransportLayerModule.java class
+-- this file is just a wrapper to workaround some problems and be more compliant with defined API.
+
+local javamodule = require ("debugger.transport.javasocket")
+local M = {}
+
+
+--------------------------------------------------------------------------------
+-- Client socket to be connected to DBGP server.
+-- @type socket
+local socket = {}
+
+--------------------------------------------------------------------------------
+-- Connect socket to given server.
+-- @function [parent=#socket] connect
+-- @param self
+-- @param #string host name or ip address.
+-- @param #number port number.
+-- @return true on success.
+-- @return nil, error message on failure.
+function socket:connect (host,port)
+ local status, res = pcall(function() return self.wrapper:connect(host,port) end)
+ if status then
+ return true
+ else
+ return nil, res
+ end
+end
+
+--------------------------------------------------------------------------------
+-- Reads some data from socket.
+-- @function [parent=#socket] receive
+-- @param self
+-- @param #number number of bytes to read.
+-- @return #string read data on success.
+-- @return nil, error message, partial buffer on failure.
+function socket:receive (number)
+
+ local function receive()
+ local res = {}
+ for i=1,number do
+ local data = self.wrapper:receive()
+ if (data =="") then
+ res[#res+1] = "\000"
+ else
+ res[#res+1] = data
+ end
+ end
+ return table.concat(res,"")
+ end
+
+ local status, res = pcall(receive)
+ if status then
+ return res
+ else
+ return nil, res
+ end
+end
+
+--------------------------------------------------------------------------------
+-- Send data to server.
+-- @function [parent=#socket] send
+-- @param self
+-- @param #string buffer to send.
+-- @return true on success.
+-- @return nil, error message on failure.
+function socket:send (buffer)
+ return pcall(function () string.gsub(buffer,"([^%z]+)",
+ function (data)
+ self.wrapper:send(data)
+ self.wrapper:send("")
+ end)
+ end)
+end
+
+--------------------------------------------------------------------------------
+-- Set socket blocking or not.
+--
+-- The name is borrowed from LuaSocket, but the actual usage is just fully
+-- blocking or non-blocking.
+--
+-- @function [parent=#socket] settimeout
+-- @param self
+-- @param nil to set non-blocking, any other value to set blocking.
+-- @return true on success.
+-- @return nil, error message on failure.
+
+function socket:settimeout (nonblocking)
+ return pcall(function() return self.wrapper:settimeout(nonblocking) end)
+end
+
+--------------------------------------------------------------------------------
+-- Closes the socket.
+-- @function [parent=#socket] close
+-- @param self
+-- @return true.
+function socket:close ()
+ return pcall(function() return self.wrapper:close() end)
+end
+
+--------------------------------------------------------------------------------
+-- Create a new TCP socket, not yet connected to anything.
+-- @function [parent=#debugger.transport.java] create
+-- @return #socket the created socket
+function M.create ()
+ local status, res = pcall(function() return javamodule.create() end)
+ if status then
+ local t = setmetatable({wrapper = res},{__index = socket})
+ return t
+ else
+ return nil, res
+ end
+end
+
+--------------------------------------------------------------------------------
+-- Wait for some time. Minimum precision is not defined strictly but should be
+-- a millisecond resolution at least.
+-- @function [parent=#debugger.transport.java] sleep
+-- @param #number time amount of time to wait in seconds (decimal numbers
+-- allowed).
+function M.sleep (time)
+ javamodule.sleep(time)
+end
+
+--------------------------------------------------------------------------------
+-- Encode a string to its Base64 representation.
+-- @function [parent=#debugger.transport.java] rawb64
+-- @param #string input content to encode.
+-- @return #string Base64 encoded string.
+function M.rawb64 (input)
+ return javamodule.rawb64(input)
+end
+
+--------------------------------------------------------------------------------
+-- Encode a string to its Base64 representation with lines wrapped at 76
+-- characters.
+-- @function [parent=#debugger.transport.java] b64
+-- @param #string input content to encode.
+-- @return #string Base64 encoded string.
+function M.b64 (input)
+ return javamodule.b64(input)
+end
+
+--------------------------------------------------------------------------------
+-- Decode a Base64 encoded string.
+-- @function [parent=#debugger.transport.java] unb64
+-- @param #string input Base64 encoded string.
+-- @return #string decoded string.
+function M.unb64 (input)
+ return javamodule.unb64(input)
+end
+
+return M
diff --git a/plugins/org.eclipse.ldt/script/local/javaapimodelbuilder.lua b/plugins/org.eclipse.ldt/script/local/javaapimodelbuilder.lua
index 53102c0..b7ff5bc 100644
--- a/plugins/org.eclipse.ldt/script/local/javaapimodelbuilder.lua
+++ b/plugins/org.eclipse.ldt/script/local/javaapimodelbuilder.lua
@@ -41,14 +41,14 @@
-- Restore link generators
local function enablelinks()
- templateengine.env.linkto = originallinkto
+ templateengine.env.linkto = originallinkto
end
-- Disable link generators
local function disablelinks()
- templateengine.env.linkto = function()
- return nil, 'Link generation is disabled.'
- end
+ templateengine.env.linkto = function()
+ return nil, 'Link generation is disabled.'
+ end
end
-- Links are disabled by default
@@ -56,95 +56,66 @@
-- Handle only local item references
templateengineenv.linktypes['item'] = function(item)
- if item.parent and item.parent.tag == 'recordtypedef' then
- return string.format('#%s.%s', templateengineenv.anchor(item.parent), item.name)
- end
- return string.format('#%s', templateengineenv.anchor(item))
+ if item.parent and item.parent.tag == 'recordtypedef' then
+ return string.format('#%s.%s', templateengineenv.anchor(item.parent), item.name)
+ end
+ return string.format('#%s', templateengineenv.anchor(item))
end
-- Perform actual environment update
for functionname, body in pairs( templateengineenv ) do
- templateengine.env[ functionname ] = body
+ templateengine.env[ functionname ] = body
end
-- create typeref
function M._typeref (_type,handledexpr)
- if not _type then return nil end
- if _type.tag == "externaltyperef" then
- return javaapimodelfactory.newexternaltyperef(_type.modulename, _type.typename)
- elseif _type.tag == "internaltyperef" then
- return javaapimodelfactory.newinternaltyperef(_type.typename)
- elseif _type.tag == "moduletyperef" then
- return javaapimodelfactory.newmoduletyperef(_type.modulename,_type.returnposition)
- elseif _type.tag == "exprtyperef" then
- return javaapimodelfactory.newexprtyperef(_type.returnposition,javaexpressionbuilder._expression(_type.expression,handledexpr))
- elseif _type.tag == "primitivetyperef" then
- return javaapimodelfactory.newprimitivetyperef(_type.typename)
- elseif _type.tag == "inlinetyperef" then
- return javaapimodelfactory.newinlinetyperef(M._typedef(_type.def,true,handledexpr))
- end
+ if not _type then return nil end
+ if _type.tag == "externaltyperef" then
+ return javaapimodelfactory.newexternaltyperef(_type.modulename, _type.typename)
+ elseif _type.tag == "internaltyperef" then
+ return javaapimodelfactory.newinternaltyperef(_type.typename)
+ elseif _type.tag == "moduletyperef" then
+ return javaapimodelfactory.newmoduletyperef(_type.modulename,_type.returnposition)
+ elseif _type.tag == "exprtyperef" then
+ return javaapimodelfactory.newexprtyperef(_type.returnposition,javaexpressionbuilder._expression(_type.expression,handledexpr))
+ elseif _type.tag == "primitivetyperef" then
+ return javaapimodelfactory.newprimitivetyperef(_type.typename)
+ elseif _type.tag == "inlinetyperef" then
+ return javaapimodelfactory.newinlinetyperef(M._typedef(_type.def,true,handledexpr))
+ end
end
-- create item
function M._item(_item,notemplate,handledexpr)
- local description = ""
- if notemplate then
- local t = {}
- if _item.shortdescription and _item.shortdescription ~= "" then table.insert(t,_item.shortdescription) end
- if _item.description and _item.description ~= "" then table.insert(t,_item.description) end
- if #t ~= 0 then description = string.format("<div style='white-space:pre'>%s</div>",table.concat(t,"\n") ) end
- else
- description = templateengine.applytemplate(_item, 3)
- end
+ local description = ""
+ if notemplate then
+ local t = {}
+ if _item.shortdescription and _item.shortdescription ~= "" then table.insert(t,_item.shortdescription) end
+ if _item.description and _item.description ~= "" then table.insert(t,_item.description) end
+ if #t ~= 0 then description = string.format("<div style='white-space:pre'>%s</div>",table.concat(t,"\n") ) end
+ else
+ description = templateengine.applytemplate(_item, 3)
+ end
- local jitem = javaapimodelfactory.newitem(_item.name,
- description,
- _item.sourcerange.min - 1,
- _item.sourcerange.max,
- M._typeref(_item.type,handledexpr)
- )
- handledexpr[_item] = jitem
- return jitem
+ local jitem = javaapimodelfactory.newitem(_item.name,
+ description,
+ _item.sourcerange.min - 1,
+ _item.sourcerange.max,
+ M._typeref(_item.type,handledexpr)
+ )
+ handledexpr[_item] = jitem
+ return jitem
end
-- create typedef
function M._typedef(_typedef,notemplate, handledexpr)
- local jtypedef
- -- Dealing with records
- if _typedef.tag == "recordtypedef" then
+ local jtypedef
+ -- Dealing with records
+ if _typedef.tag == "recordtypedef" then
- -- manage description
- local description = ""
- if notemplate then
- local t = {}
- if _typedef.shortdescription and _typedef.shortdescription ~= "" then table.insert(t,_typedef.shortdescription) end
- if _typedef.description and _typedef.description ~= "" then table.insert(t,_typedef.description) end
- if #t ~= 0 then description = string.format("<div style='white-space:pre'>%s</div>",table.concat(t,"\n") ) end
- else
- description = templateengine.applytemplate(_typedef, 3)
- end
-
- jtypedef = javaapimodelfactory.newrecordtypedef(_typedef.name,
- description,
- _typedef.sourcerange.min - 1,
- _typedef.sourcerange.max,
- M._typeref(_typedef.supertype,handledexpr),
- M._typeref(_typedef.defaultkeytyperef,handledexpr),
- M._typeref(_typedef.defaultvaluetyperef,handledexpr),
- M._typeref(_typedef.call,handledexpr)
- )
-
- -- Appending fields
- for _, _item in pairs(_typedef.fields) do
- local jitem = M._item(_item,notemplate,handledexpr)
- javaapimodelfactory.addfield(jtypedef, jitem)
- end
-
- elseif _typedef.tag == "functiontypedef" then
-
- -- manage description
- local description = ""
- if notemplate then
+ -- manage description
+ local description = ""
+ if notemplate then
local t = {}
if _typedef.shortdescription and _typedef.shortdescription ~= "" then table.insert(t,_typedef.shortdescription) end
if _typedef.description and _typedef.description ~= "" then table.insert(t,_typedef.description) end
@@ -152,62 +123,91 @@
else
description = templateengine.applytemplate(_typedef, 3)
end
-
- -- Dealing with function
- jtypedef = javaapimodelfactory.newfunctiontypedef(description)
- -- Appending parameters
- for _, _param in ipairs(_typedef.params) do
- javaapimodelfactory.addparam(jtypedef,_param.name, M._typeref(_param.type, handledexpr), _param.description)
- end
+ jtypedef = javaapimodelfactory.newrecordtypedef(_typedef.name,
+ description,
+ _typedef.sourcerange.min - 1,
+ _typedef.sourcerange.max,
+ M._typeref(_typedef.supertype,handledexpr),
+ M._typeref(_typedef.defaultkeytyperef,handledexpr),
+ M._typeref(_typedef.defaultvaluetyperef,handledexpr),
+ M._typeref(_typedef.call,handledexpr)
+ )
- -- Appending returned types
- for _, _return in ipairs(_typedef.returns) do
- local jreturn = javaapimodelfactory.newreturn()
- for _, _type in ipairs( _return.types ) do
- javaapimodelfactory.addtype(jreturn,M._typeref(_type,handledexpr))
- end
- javaapimodelfactory.functionaddreturn(jtypedef,jreturn)
- end
- end
- return jtypedef
+ -- Appending fields
+ for _, _item in pairs(_typedef.fields) do
+ local jitem = M._item(_item,notemplate,handledexpr)
+ javaapimodelfactory.addfield(jtypedef, jitem)
+ end
+
+ elseif _typedef.tag == "functiontypedef" then
+
+ -- manage description
+ local description = ""
+ if notemplate then
+ local t = {}
+ if _typedef.shortdescription and _typedef.shortdescription ~= "" then table.insert(t,_typedef.shortdescription) end
+ if _typedef.description and _typedef.description ~= "" then table.insert(t,_typedef.description) end
+ if #t ~= 0 then description = string.format("<div style='white-space:pre'>%s</div>",table.concat(t,"\n") ) end
+ else
+ description = templateengine.applytemplate(_typedef, 3)
+ end
+
+ -- Dealing with function
+ jtypedef = javaapimodelfactory.newfunctiontypedef(description)
+
+ -- Appending parameters
+ for _, _param in ipairs(_typedef.params) do
+ javaapimodelfactory.addparam(jtypedef,_param.name, M._typeref(_param.type, handledexpr), _param.description)
+ end
+
+ -- Appending returned types
+ for _, _return in ipairs(_typedef.returns) do
+ local jreturn = javaapimodelfactory.newreturn()
+ for _, _type in ipairs( _return.types ) do
+ javaapimodelfactory.addtype(jreturn,M._typeref(_type,handledexpr))
+ end
+ javaapimodelfactory.functionaddreturn(jtypedef,jreturn)
+ end
+ end
+ return jtypedef
end
-- create lua file api
function M._file(_file)
- local handledexpr = {}
+ local handledexpr = {}
- -- Enable links just for module file objects
- enablelinks()
- local jfile
- if _file.name then
- jfile = javaapimodelfactory.newfileapi(templateengine.applytemplate(_file, 1))
- else
- jfile = javaapimodelfactory.newfileapi()
- end
- disablelinks()
+ -- Enable links just for module file objects
+ enablelinks()
+ local jfile
+ if _file.name then
+ jfile = javaapimodelfactory.newfileapi(templateengine.applytemplate(_file, 1))
+ else
+ jfile = javaapimodelfactory.newfileapi()
+ end
+ disablelinks()
- -- Adding global variables
- for _, _item in pairs(_file.globalvars) do
- -- Fill Java item
- javaapimodelfactory.addglobalvar(jfile,M._item(_item,false,handledexpr))
- end
+ -- Adding global variables
+ for _, _item in pairs(_file.globalvars) do
+ -- Fill Java item
+ javaapimodelfactory.addglobalvar(jfile,M._item(_item,false,handledexpr))
+ end
- -- Adding returned types
- for _, _return in ipairs(_file.returns) do
- local jreturn = javaapimodelfactory.newreturn()
- for _, _type in ipairs( _return.types ) do
- javaapimodelfactory.addtype(jreturn,M._typeref(_type,handledexpr))
- end
- javaapimodelfactory.fileapiaddreturn(jfile,jreturn)
- end
+ -- Adding returned types
+ for _, _return in ipairs(_file.returns) do
+ local jreturn = javaapimodelfactory.newreturn()
+ for _, _type in ipairs( _return.types ) do
+ javaapimodelfactory.addtype(jreturn,M._typeref(_type,handledexpr))
+ end
+ javaapimodelfactory.fileapiaddreturn(jfile,jreturn)
+ end
- -- Adding types defined in files
- for _, _typedef in pairs(_file.types) do
- javaapimodelfactory.addtypedef(jfile,_typedef.name,M._typedef(_typedef,false,handledexpr))
- end
+ -- Adding types defined in files
+ for _, _typedef in pairs(_file.types) do
+ javaapimodelfactory.addtypedef(jfile,_typedef.name,M._typedef(_typedef,false,handledexpr))
+ end
- return jfile, handledexpr
+ return jfile, handledexpr
end
return M
diff --git a/plugins/org.eclipse.ldt/script/local/javaexpressionbuilder.lua b/plugins/org.eclipse.ldt/script/local/javaexpressionbuilder.lua
index 4f791b1..ab605a6 100644
--- a/plugins/org.eclipse.ldt/script/local/javaexpressionbuilder.lua
+++ b/plugins/org.eclipse.ldt/script/local/javaexpressionbuilder.lua
@@ -1,85 +1,85 @@
---------------------------------------------------------------------------------
--- Copyright (c) 2013 Sierra Wireless.
--- All rights reserved. This program and the accompanying materials
--- are made available under the terms of the Eclipse Public License v1.0
--- which accompanies this distribution, and is available at
--- http://www.eclipse.org/legal/epl-v10.html
---
--- Contributors:
--- - initial API and implementation and initial documentation
---------------------------------------------------------------------------------
-
-local javainternalmodelfactory = require 'javainternalmodelfactory'
-
-local J = {}
-
---------------------------------------
--- create expression java object
-function J._expression(_expr,handledexpr)
- -- search if already handled
- if handledexpr and handledexpr[_expr] then return handledexpr[_expr] end
-
- -- else handle it
- local tag = _expr.tag
- if tag == "MIdentifier" then
- return J._identifier(_expr,handledexpr)
- elseif tag == "MIndex" then
- return J._index(_expr,handledexpr)
- elseif tag == "MCall" then
- return J._call(_expr,handledexpr)
- elseif tag == "MInvoke" then
- return J._invoke(_expr,handledexpr)
- end
- return nil
-end
-
---------------------------------------
--- create identifier java object
- function J._identifier(_identifier,handledexpr)
- local jidentifier = javainternalmodelfactory.newidentifier(
- _identifier.sourcerange.min - 1,
- _identifier.sourcerange.max
- )
- handledexpr[_identifier] =jidentifier
- return jidentifier
-end
-
---------------------------------------
--- create index java object
-function J._index(_index,handledexpr)
- local jindex = javainternalmodelfactory.newindex(
- _index.sourcerange.min -1,
- _index.sourcerange.max,
- J._expression(_index.left,handledexpr),
- _index.right
- )
- handledexpr[_index] =jindex
- return jindex
-end
-
---------------------------------------
--- create call java object
-function J._call(_call,handledexpr)
- local jcall = javainternalmodelfactory.newcall(
- _call.sourcerange.min - 1,
- _call.sourcerange.max,
- J._expression(_call.func,handledexpr)
- )
- handledexpr[_call] =jcall
- return jcall
-end
-
---------------------------------------
--- create invoke java object
-function J._invoke(_invoke,handledexpr)
- local jinvoke = javainternalmodelfactory.newinvoke(
- _invoke.sourcerange.min - 1,
- _invoke.sourcerange.max,
- _invoke.functionname,
- J._expression(_invoke.record,handledexpr)
- )
- handledexpr[_invoke] =jinvoke
- return jinvoke
-end
-
-return J
\ No newline at end of file
+--------------------------------------------------------------------------------
+-- Copyright (c) 2013 Sierra Wireless.
+-- All rights reserved. This program and the accompanying materials
+-- are made available under the terms of the Eclipse Public License v1.0
+-- which accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- Contributors:
+-- - initial API and implementation and initial documentation
+--------------------------------------------------------------------------------
+
+local javainternalmodelfactory = require 'javainternalmodelfactory'
+
+local J = {}
+
+--------------------------------------
+-- create expression java object
+function J._expression(_expr,handledexpr)
+ -- search if already handled
+ if handledexpr and handledexpr[_expr] then return handledexpr[_expr] end
+
+ -- else handle it
+ local tag = _expr.tag
+ if tag == "MIdentifier" then
+ return J._identifier(_expr,handledexpr)
+ elseif tag == "MIndex" then
+ return J._index(_expr,handledexpr)
+ elseif tag == "MCall" then
+ return J._call(_expr,handledexpr)
+ elseif tag == "MInvoke" then
+ return J._invoke(_expr,handledexpr)
+ end
+ return nil
+end
+
+--------------------------------------
+-- create identifier java object
+function J._identifier(_identifier,handledexpr)
+ local jidentifier = javainternalmodelfactory.newidentifier(
+ _identifier.sourcerange.min - 1,
+ _identifier.sourcerange.max
+ )
+ handledexpr[_identifier] =jidentifier
+ return jidentifier
+end
+
+--------------------------------------
+-- create index java object
+function J._index(_index,handledexpr)
+ local jindex = javainternalmodelfactory.newindex(
+ _index.sourcerange.min -1,
+ _index.sourcerange.max,
+ J._expression(_index.left,handledexpr),
+ _index.right
+ )
+ handledexpr[_index] =jindex
+ return jindex
+end
+
+--------------------------------------
+-- create call java object
+function J._call(_call,handledexpr)
+ local jcall = javainternalmodelfactory.newcall(
+ _call.sourcerange.min - 1,
+ _call.sourcerange.max,
+ J._expression(_call.func,handledexpr)
+ )
+ handledexpr[_call] =jcall
+ return jcall
+end
+
+--------------------------------------
+-- create invoke java object
+function J._invoke(_invoke,handledexpr)
+ local jinvoke = javainternalmodelfactory.newinvoke(
+ _invoke.sourcerange.min - 1,
+ _invoke.sourcerange.max,
+ _invoke.functionname,
+ J._expression(_invoke.record,handledexpr)
+ )
+ handledexpr[_invoke] =jinvoke
+ return jinvoke
+end
+
+return J
diff --git a/plugins/org.eclipse.ldt/script/local/javainternalmodelbuilder.lua b/plugins/org.eclipse.ldt/script/local/javainternalmodelbuilder.lua
index d707443..d193c50 100644
--- a/plugins/org.eclipse.ldt/script/local/javainternalmodelbuilder.lua
+++ b/plugins/org.eclipse.ldt/script/local/javainternalmodelbuilder.lua
@@ -14,91 +14,91 @@
local javaexpressionbuilder = require 'javaexpressionbuilder'
local javainternalmodelfactory = require 'javainternalmodelfactory'
-local javaapimodelfactory = require 'javaapimodelfactory'
+local javaapimodelfactory = require 'javaapimodelfactory'
--------------------------------------
-- create internal content java object
function J._internalcontent(_internalcontent,_file,handledexpr)
- -- Setting body
- local handledexpr = handledexpr or {}
- local jblock = J._block(_internalcontent.content,handledexpr)
- local jinternalcontent = javainternalmodelfactory.newinternalmodel(jblock)
+ -- Setting body
+ local handledexpr = handledexpr or {}
+ local jblock = J._block(_internalcontent.content,handledexpr)
+ local jinternalcontent = javainternalmodelfactory.newinternalmodel(jblock)
- -- Appending unknown global variables
- for _, _item in ipairs(_internalcontent.unknownglobalvars) do
- local jitem = javaapimodelbuilder._item(_item,true,handledexpr)
- javainternalmodelfactory.addunknownglobalvar(jinternalcontent,jitem)
+ -- Appending unknown global variables
+ for _, _item in ipairs(_internalcontent.unknownglobalvars) do
+ local jitem = javaapimodelbuilder._item(_item,true,handledexpr)
+ javainternalmodelfactory.addunknownglobalvar(jinternalcontent,jitem)
- -- add occurrences
- for _,_occurrence in ipairs(_item.occurrences) do
- local jidentifier = handledexpr[_occurrence]
- if jidentifier then
- javaapimodelfactory.addoccurrence(jitem,jidentifier)
- end
- end
- end
-
- -- Appending global variables
- for _, _item in pairs(_file.globalvars) do
- local jitem = handledexpr[_item]
- if _item.type and _item.type.tag == "exprtyperef" then
- javaapimodelfactory.setexpression(jitem,handledexpr[_item.type.expression])
- end
-
- -- add occurrences
- if jitem then
- for _,_occurrence in ipairs(_item.occurrences) do
- local jidentifier = handledexpr[_occurrence]
- if jidentifier then
- javaapimodelfactory.addoccurrence(jitem,jidentifier)
- end
- end
- end
- end
-
+ -- add occurrences
+ for _,_occurrence in ipairs(_item.occurrences) do
+ local jidentifier = handledexpr[_occurrence]
+ if jidentifier then
+ javaapimodelfactory.addoccurrence(jitem,jidentifier)
+ end
+ end
+ end
- return jinternalcontent
+ -- Appending global variables
+ for _, _item in pairs(_file.globalvars) do
+ local jitem = handledexpr[_item]
+ if _item.type and _item.type.tag == "exprtyperef" then
+ javaapimodelfactory.setexpression(jitem,handledexpr[_item.type.expression])
+ end
+
+ -- add occurrences
+ if jitem then
+ for _,_occurrence in ipairs(_item.occurrences) do
+ local jidentifier = handledexpr[_occurrence]
+ if jidentifier then
+ javaapimodelfactory.addoccurrence(jitem,jidentifier)
+ end
+ end
+ end
+ end
+
+
+ return jinternalcontent
end
--------------------------------------
-- create block java object
function J._block(_block,handledexpr)
- -- Setting source range
- local jblock = javainternalmodelfactory.newblock(
- _block.sourcerange.min -1,
- _block.sourcerange.max
- )
+ -- Setting source range
+ local jblock = javainternalmodelfactory.newblock(
+ _block.sourcerange.min -1,
+ _block.sourcerange.max
+ )
- -- Append nodes to block
- for _, _expr in pairs(_block.content) do
- local jexpr = javaexpressionbuilder._expression(_expr,handledexpr)
- if not jexpr and _expr.tag == "MBlock" then
- jexpr = J._block(_expr,handledexpr)
- end
+ -- Append nodes to block
+ for _, _expr in pairs(_block.content) do
+ local jexpr = javaexpressionbuilder._expression(_expr,handledexpr)
+ if not jexpr and _expr.tag == "MBlock" then
+ jexpr = J._block(_expr,handledexpr)
+ end
javainternalmodelfactory.addcontent(jblock,jexpr)
- end
+ end
- for _, _localvar in pairs(_block.localvars) do
- -- Create Java item
- local jitem = javaapimodelbuilder._item(_localvar.item,true,handledexpr)
+ for _, _localvar in pairs(_block.localvars) do
+ -- Create Java item
+ local jitem = javaapimodelbuilder._item(_localvar.item,true,handledexpr)
- -- add occurrence
- for _,_occurrence in ipairs(_localvar.item.occurrences) do
- local jidentifier = handledexpr[_occurrence]
- if jidentifier then
- javaapimodelfactory.addoccurrence(jitem,jidentifier)
- end
- end
+ -- add occurrence
+ for _,_occurrence in ipairs(_localvar.item.occurrences) do
+ local jidentifier = handledexpr[_occurrence]
+ if jidentifier then
+ javaapimodelfactory.addoccurrence(jitem,jidentifier)
+ end
+ end
- -- Append Java local variable definition
- local jlocalvar = javainternalmodelfactory.newlocalvar(
- jitem,
- _localvar.scope.min -1,
- _localvar.scope.max
- )
- javainternalmodelfactory.addlocalvar(jblock,jlocalvar)
- end
- return jblock
+ -- Append Java local variable definition
+ local jlocalvar = javainternalmodelfactory.newlocalvar(
+ jitem,
+ _localvar.scope.min -1,
+ _localvar.scope.max
+ )
+ javainternalmodelfactory.addlocalvar(jblock,jlocalvar)
+ end
+ return jblock
end
return J
diff --git a/plugins/org.eclipse.ldt/script/local/javamodelsbuilder.lua b/plugins/org.eclipse.ldt/script/local/javamodelsbuilder.lua
index 050c5b4..5953f01 100644
--- a/plugins/org.eclipse.ldt/script/local/javamodelsbuilder.lua
+++ b/plugins/org.eclipse.ldt/script/local/javamodelsbuilder.lua
@@ -51,10 +51,10 @@
local cleanedsource
local iscleaned = false
if linetoclean == nbline then
- -- manage last line
- cleanedsource = source:gsub('([^\n]-)$',function (lastline)
+ -- manage last line
+ cleanedsource = source:gsub('([^\n]-)$',function (lastline)
iscleaned = true
- return string.rep(' ',string.len(lastline))
+ return string.rep(' ',string.len(lastline))
end)
elseif linetoclean == 1 then
-- manage first line
@@ -71,7 +71,7 @@
end
return cleanedsource, iscleaned
end
-
+
local cleanedsource
local iscleaned = false
if lineindex == 1 then
@@ -101,7 +101,7 @@
cleanedsource, iscleaned = cleanline(source,realcodelineindex,nbline)
end
end
-
+
-- after cleaning, recheck hoping there are no errors.
if iscleaned then
f, _ = loadstring(cleanedsource,'source_to_check')
@@ -112,7 +112,7 @@
end
end
end
-
+
-- take cleaned source as source
if f then
source = cleanedsource
@@ -121,7 +121,7 @@
-- ------------------------------------------------
-- END OF EXPERIMENTAL CODE
-- -------------------------------------------------
- end
+ end
if not f then return root end
diff --git a/tests/org.eclipse.ldt.lua.tests/lib/tablecompare.lua b/tests/org.eclipse.ldt.lua.tests/lib/tablecompare.lua
index 3a7409d..04980b5 100644
--- a/tests/org.eclipse.ldt.lua.tests/lib/tablecompare.lua
+++ b/tests/org.eclipse.ldt.lua.tests/lib/tablecompare.lua
@@ -17,24 +17,24 @@
local function checks() end
local function pathconcat(pt, starti, endi)
- local t = {}
- local prev
- local empties = 0
- starti = starti or 1
- endi = endi or #pt
+ local t = {}
+ local prev
+ local empties = 0
+ starti = starti or 1
+ endi = endi or #pt
- for i = starti, endi do
- local v = pt[i]
- if not v then break
- elseif v == '' then
- empties = empties+1
- else
- table.insert(t, prev)
- prev = v
- end
- end
- table.insert(t, prev)
- return table.concat(t, '.', 1, endi-starti+1-empties)
+ for i = starti, endi do
+ local v = pt[i]
+ if not v then break
+ elseif v == '' then
+ empties = empties+1
+ else
+ table.insert(t, prev)
+ prev = v
+ end
+ end
+ table.insert(t, prev)
+ return table.concat(t, '.', 1, endi-starti+1-empties)
end
--------------------------------------------------------------------------------
@@ -47,9 +47,9 @@
-- @return cleaned path as a string.
--
function M.clean(path)
- checks('string')
- local p = M.segments(path)
- return pathconcat(p)
+ checks('string')
+ local p = M.segments(path)
+ return pathconcat(p)
end
--
@@ -61,23 +61,23 @@
-- ("toto.titi",1), ("toto.tutu",2), ("tata",3) ("tonton.1", 4), ("tonton.2"=5)
--
function M.recursivepairs(t, prefix)
- checks('table', '?string')
- local function it(t, prefix, cp)
- cp[t] = true
- local pp = prefix == "" and prefix or "."
- for k, v in pairs(t) do
- k = pp..tostring(k)
- if type(v) == 'table' then
- if not cp[v] then it(v, prefix..k, cp) end
- else
- coroutine.yield(prefix..k, v)
- end
- end
- cp[t] = nil
- end
+ checks('table', '?string')
+ local function it(t, prefix, cp)
+ cp[t] = true
+ local pp = prefix == "" and prefix or "."
+ for k, v in pairs(t) do
+ k = pp..tostring(k)
+ if type(v) == 'table' then
+ if not cp[v] then it(v, prefix..k, cp) end
+ else
+ coroutine.yield(prefix..k, v)
+ end
+ end
+ cp[t] = nil
+ end
- prefix = prefix or ""
- return coroutine.wrap(function() it(t, M.clean(prefix), {}) end)
+ prefix = prefix or ""
+ return coroutine.wrap(function() it(t, M.clean(prefix), {}) end)
end
--------------------------------------------------------------------------------
@@ -90,17 +90,17 @@
-- @return list of split path elements.
--
function M.segments(path)
- checks('string')
- local t = {}
- local index, newindex, elt = 1
- repeat
- newindex = path:find(".", index, true) or #path+1 --last round
- elt = path:sub(index, newindex-1)
- elt = tonumber(elt) or elt
- if elt and elt ~= "" then table.insert(t, elt) end
- index = newindex+1
- until newindex==#path+1
- return t
+ checks('string')
+ local t = {}
+ local index, newindex, elt = 1
+ repeat
+ newindex = path:find(".", index, true) or #path+1 --last round
+ elt = path:sub(index, newindex-1)
+ elt = tonumber(elt) or elt
+ if elt and elt ~= "" then table.insert(t, elt) end
+ index = newindex+1
+ until newindex==#path+1
+ return t
end
---
@@ -112,40 +112,40 @@
-- ("toto.titi",1), ("toto.tutu",2), ("tata",3) ("tonton.1", 4), ("tonton.2"=5)
--
function M.recursivepairs(t, prefix)
- checks('table', '?string')
- local function it(t, prefix, cp)
- cp[t] = true
- local pp = prefix == "" and prefix or "."
- for k, v in pairs(t) do
- k = pp..tostring(k)
- if type(v) == 'table' then
- if not cp[v] then it(v, prefix..k, cp) end
- else
- coroutine.yield(prefix..k, v)
- end
- end
- cp[t] = nil
- end
+ checks('table', '?string')
+ local function it(t, prefix, cp)
+ cp[t] = true
+ local pp = prefix == "" and prefix or "."
+ for k, v in pairs(t) do
+ k = pp..tostring(k)
+ if type(v) == 'table' then
+ if not cp[v] then it(v, prefix..k, cp) end
+ else
+ coroutine.yield(prefix..k, v)
+ end
+ end
+ cp[t] = nil
+ end
- prefix = prefix or ""
- return coroutine.wrap(function() it(t, M.clean(prefix), {}) end)
+ prefix = prefix or ""
+ return coroutine.wrap(function() it(t, M.clean(prefix), {}) end)
end
function M.diff(t1, t2, norecurse)
- local d = {}
- local t3 = {}
- local rpairs = norecurse and pairs or M.recursivepairs
- for k, v in rpairs(t1) do t3[k] = v end
- for k, v in rpairs(t2) do
- if v ~= t3[k] then
- table.insert(d, k)
- end
- t3[k] = nil
- end
- for k, v in pairs(t3) do
- table.insert(d, k)
- end
- return d
+ local d = {}
+ local t3 = {}
+ local rpairs = norecurse and pairs or M.recursivepairs
+ for k, v in rpairs(t1) do t3[k] = v end
+ for k, v in rpairs(t2) do
+ if v ~= t3[k] then
+ table.insert(d, k)
+ end
+ t3[k] = nil
+ end
+ for k, v in pairs(t3) do
+ table.insert(d, k)
+ end
+ return d
end
---
-- @function [parent=#tablecompare] compare
@@ -153,40 +153,40 @@
-- @param #table t2
--
local ignoredtypes = {
- ['function'] = true,
- ['thread'] = true,
- ['userdata'] = true,
+ ['function'] = true,
+ ['thread'] = true,
+ ['userdata'] = true,
}
function M.compare(t1, t2)
- -- Build t1 copy
- local t3 = {}
- for k,v in M.recursivepairs(t1) do
- t3[k] = v
- end
+ -- Build t1 copy
+ local t3 = {}
+ for k,v in M.recursivepairs(t1) do
+ t3[k] = v
+ end
- -- Browse recursively for differences with t2
- local differences = {}
- for k, v in M.recursivepairs( t2 ) do
- local t3valuetype = type( t3[k] )
- local t2valuetype = type( v )
+ -- Browse recursively for differences with t2
+ local differences = {}
+ for k, v in M.recursivepairs( t2 ) do
+ local t3valuetype = type( t3[k] )
+ local t2valuetype = type( v )
- -- Values are different when their type differ
- if t3valuetype ~= t2valuetype then
- table.insert(differences, k)
- elseif not ignoredtypes[t3valuetype] and v ~= t3[k] then
- -- Same type but different values
- table.insert(differences, k)
- end
- t3[k] = nil
- end
+ -- Values are different when their type differ
+ if t3valuetype ~= t2valuetype then
+ table.insert(differences, k)
+ elseif not ignoredtypes[t3valuetype] and v ~= t3[k] then
+ -- Same type but different values
+ table.insert(differences, k)
+ end
+ t3[k] = nil
+ end
- -- Loacate t1 keys which are not in t2
- for k, v in M.recursivepairs( t3 ) do
- table.insert(differences, k)
- end
- return differences
+ -- Locate t1 keys which are not in t2
+ for k, v in M.recursivepairs( t3 ) do
+ table.insert(differences, k)
+ end
+ return differences
end
---
-- @function [parent=#tablecompare] stripfunctions
@@ -195,18 +195,18 @@
--
function M.stripfunctions(tab, visitedtables)
- -- Avoid infinite self referenced table browsing
- visitedtables = visitedtables or {}
- visitedtables[tab] = true
+ -- Avoid infinite self referenced table browsing
+ visitedtables = visitedtables or {}
+ visitedtables[tab] = true
- for k, v in pairs( tab ) do
- local typev = type(v)
- if typev == 'function' then
- tab[k] = nil
- elseif typev == 'table' and not visitedtables[v] then
- M.stripfunctions(v, visitedtables)
- end
- end
- return tab
+ for k, v in pairs( tab ) do
+ local typev = type(v)
+ if typev == 'function' then
+ tab[k] = nil
+ elseif typev == 'table' and not visitedtables[v] then
+ M.stripfunctions(v, visitedtables)
+ end
+ end
+ return tab
end
return M
diff --git a/tests/org.eclipse.ldt.lua.tests/lib/tabledumpbeautifier.lua b/tests/org.eclipse.ldt.lua.tests/lib/tabledumpbeautifier.lua
index e83e416..7854f1f 100644
--- a/tests/org.eclipse.ldt.lua.tests/lib/tabledumpbeautifier.lua
+++ b/tests/org.eclipse.ldt.lua.tests/lib/tabledumpbeautifier.lua
@@ -16,94 +16,94 @@
local cache
local function buildcache(str)
- -- Generate AST
- local ast = mlc:src_to_ast(str)
- local status, astvalid, errormsg = pcall(compiler.check_ast, ast)
- if not astvalid then
- return nil, string.format("Unable to build AST.%s", errormsg)
- end
+ -- Generate AST
+ local ast = mlc:src_to_ast(str)
+ local status, astvalid, errormsg = pcall(compiler.check_ast, ast)
+ if not astvalid then
+ return nil, string.format("Unable to build AST.%s", errormsg)
+ end
- -- Cache string nodes
- cache = {}
+ -- Cache string nodes
+ cache = {}
Q(ast):filter('String') :foreach(function(node)
if node.tag == 'String' then
table.insert(cache, node)
end
end)
- return true
+ return true
end
local function mustbeignored(offset)
- if not cache then
- return nil, 'Cache is not available.'
- end
- for _, node in ipairs(cache) do
- if node.lineinfo.first.offset <= offset and offset <= node.lineinfo.last.offset then
- return true
- end
- end
- return false
+ if not cache then
+ return nil, 'Cache is not available.'
+ end
+ for _, node in ipairs(cache) do
+ if node.lineinfo.first.offset <= offset and offset <= node.lineinfo.last.offset then
+ return true
+ end
+ end
+ return false
end
function M.prettify(serializedtablestring)
- -- Check input
- if type(serializedtablestring) ~= 'string' then
- return nil, 'String expected.'
- end
+ -- Check input
+ if type(serializedtablestring) ~= 'string' then
+ return nil, 'String expected.'
+ end
- local chartoindent = {
- [','] = ',\n',
- [';'] = ';\n',
- ['{'] = '{\n',
- ['}'] = '\n}\n'
- }
+ local chartoindent = {
+ [','] = ',\n',
+ [';'] = ';\n',
+ ['{'] = '{\n',
+ ['}'] = '\n}\n'
+ }
- --
- -- Replace provided characters
- --
- for char, replacement in pairs(chartoindent) do
+ --
+ -- Replace provided characters
+ --
+ for char, replacement in pairs(chartoindent) do
- -- Refresh cache as offset of analyzed string change at each loop
- local status, error = buildcache(serializedtablestring)
- if not status then
- return nil, error
- end
+ -- Refresh cache as offset of analyzed string change at each loop
+ local status, error = buildcache(serializedtablestring)
+ if not status then
+ return nil, error
+ end
- --
- -- Seek for character to replace
- --
- local buffer = {}
- local searchstart= 1
- local charposition = serializedtablestring:find(char, searchstart, true)
- while charposition ~= nil do
+ --
+ -- Seek for character to replace
+ --
+ local buffer = {}
+ local searchstart= 1
+ local charposition = serializedtablestring:find(char, searchstart, true)
+ while charposition ~= nil do
- -- Bufferize code before character
- table.insert(buffer, serializedtablestring:sub(searchstart, charposition - 1))
+ -- Bufferize code before character
+ table.insert(buffer, serializedtablestring:sub(searchstart, charposition - 1))
- -- Ensure that we can replace this character, avoiding thoses in strings
- local ignoreposition, errormessage = mustbeignored(charposition)
- if errormessage then
- return nil, string.format('Unable to know if offset %d has to be formated.%s.', charposition, errormessage)
- elseif ignoreposition then
- -- Ignore character
- table.insert(buffer, char)
- else
- -- Actual character replacement
- table.insert(buffer, replacement)
- end
+ -- Ensure that we can replace this character, avoiding those in strings
+ local ignoreposition, errormessage = mustbeignored(charposition)
+ if errormessage then
+ return nil, string.format('Unable to know if offset %d has to be formated.%s.', charposition, errormessage)
+ elseif ignoreposition then
+ -- Ignore character
+ table.insert(buffer, char)
+ else
+ -- Actual character replacement
+ table.insert(buffer, replacement)
+ end
- -- Move to next replacement
- searchstart = charposition + 1
- charposition = serializedtablestring:find(char, searchstart, true)
- end
+ -- Move to next replacement
+ searchstart = charposition + 1
+ charposition = serializedtablestring:find(char, searchstart, true)
+ end
- -- Append remaining string
- table.insert(buffer, serializedtablestring:sub(searchstart))
+ -- Append remaining string
+ table.insert(buffer, serializedtablestring:sub(searchstart))
- -- Replace original string
- serializedtablestring = table.concat(buffer)
- end
+ -- Replace original string
+ serializedtablestring = table.concat(buffer)
+ end
- -- Format resulting code
- return formatter.indentcode(serializedtablestring, '\n', true, '\t')
+ -- Format resulting code
+ return formatter.indentcode(serializedtablestring, '\n', true, '\t')
end
return M
diff --git a/tests/org.eclipse.ldt.lua.tests/lib/testmodelutil.lua b/tests/org.eclipse.ldt.lua.tests/lib/testmodelutil.lua
index cd5f8dd..8d33c87 100644
--- a/tests/org.eclipse.ldt.lua.tests/lib/testmodelutil.lua
+++ b/tests/org.eclipse.ldt.lua.tests/lib/testmodelutil.lua
@@ -11,7 +11,7 @@
local apimodel = require "models.apimodel"
---
--- @module testmodelutil
+-- @module testmodelutil
local M ={}
---
@@ -19,29 +19,29 @@
-- @param #table model without functions
-- @return #table model with functions
function M.addfunctions(model,visitednode)
-
- -- Avoid infinite self referenced table browsing
- local visitednode = visitednode or {}
- visitednode[model] = true
- -- add function for known table
- if model.tag and apimodel["_"..model.tag] and type(apimodel["_"..model.tag]) == 'function' then
- local emptymodel = apimodel["_"..model.tag]()
- for k,v in pairs(emptymodel) do
- if (type(v) == 'function') then
- model[k] = v
- end
- end
- end
-
- -- do it recursively
- for k, v in pairs( model ) do
- if type(v) == 'table' and not visitednode[v] then
- M.addfunctions(v, visitednode)
- end
- end
- return model
+ -- Avoid infinite self referenced table browsing
+ local visitednode = visitednode or {}
+ visitednode[model] = true
+
+ -- add function for known table
+ if model.tag and apimodel["_"..model.tag] and type(apimodel["_"..model.tag]) == 'function' then
+ local emptymodel = apimodel["_"..model.tag]()
+ for k,v in pairs(emptymodel) do
+ if (type(v) == 'function') then
+ model[k] = v
+ end
+ end
+ end
+
+ -- do it recursively
+ for k, v in pairs( model ) do
+ if type(v) == 'table' and not visitednode[v] then
+ M.addfunctions(v, visitednode)
+ end
+ end
+ return model
end
-return M
\ No newline at end of file
+return M
diff --git a/tests/org.eclipse.ldt.lua.tests/lib/testutil.lua b/tests/org.eclipse.ldt.lua.tests/lib/testutil.lua
index d99a0b7..c80d509 100644
--- a/tests/org.eclipse.ldt.lua.tests/lib/testutil.lua
+++ b/tests/org.eclipse.ldt.lua.tests/lib/testutil.lua
@@ -21,22 +21,22 @@
-- Loading template engine environment
--
for key, value in pairs(require 'template.utils') do
- templateengine.env[key] = value
+ templateengine.env[key] = value
end
local M = {}
local errorhandling = function (filename)
- local errorbuffer = {}
- return function (err, offset)
- local message = string.format(
- "An error occured while parsing html for %s at offset %d:%s\n",
- filename,
- offset,
- err
- )
- table.insert(errorbuffer, message)
- end, errorbuffer
+ local errorbuffer = {}
+ return function (err, offset)
+ local message = string.format(
+ "An error occured while parsing html for %s at offset %d:%s\n",
+ filename,
+ offset,
+ err
+ )
+ table.insert(errorbuffer, message)
+ end, errorbuffer
end
---
@@ -46,15 +46,15 @@
-- @return #string file content
function M.loadfile(filepath)
- local luafile, errormessage = io.open(filepath, 'r')
- assert(
- luafile,
- string.format('Unable to read from %s.\n%s', filepath, errormessage or '')
- )
- local filestring = luafile:read('*a')
- luafile:close()
-
- return filestring
+ local luafile, errormessage = io.open(filepath, 'r')
+ assert(
+ luafile,
+ string.format('Unable to read from %s.\n%s', filepath, errormessage or '')
+ )
+ local filestring = luafile:read('*a')
+ luafile:close()
+
+ return filestring
end
---
@@ -65,27 +65,27 @@
-- @return table
-- @return status, errormessage in case of failure
function M.parsehtml(htmlstring, filename)
-
- -- Create parser for input html
- local handler = domhandler.createhandler()
- local xmlparser = xml.newparser(handler)
- xmlparser.options.stripWS = false
- local errorhandlingfunction, errormessages = errorhandling(filename)
- xmlparser.options.errorHandler = errorhandlingfunction
-
- -- Actual html parsing
- local status, pcallerror = pcall( function()
- xmlparser:parse(htmlstring)
- end)
-
- -- throw error with all message
- assert(status, string.format("%s\n%s\n%s",table.concat(errormessages), tostring(pcallerror),htmlstring))
-
- --throw failure
- if #errormessages ~= 0 then
- return nil, string.format("%s\n%s",table.concat(errormessages),htmlstring)
- end
- return handler.root
+
+ -- Create parser for input html
+ local handler = domhandler.createhandler()
+ local xmlparser = xml.newparser(handler)
+ xmlparser.options.stripWS = false
+ local errorhandlingfunction, errormessages = errorhandling(filename)
+ xmlparser.options.errorHandler = errorhandlingfunction
+
+ -- Actual html parsing
+ local status, pcallerror = pcall( function()
+ xmlparser:parse(htmlstring)
+ end)
+
+ -- throw error with all message
+ assert(status, string.format("%s\n%s\n%s",table.concat(errormessages), tostring(pcallerror),htmlstring))
+
+ --throw failure
+ if #errormessages ~= 0 then
+ return nil, string.format("%s\n%s",table.concat(errormessages),htmlstring)
+ end
+ return handler.root
end
---
@@ -97,41 +97,41 @@
-- @parm referencehtml Html expected
-- @return status, formattederrormessage
function M.comparehtml(generatedtable, referencetable, generatedhtml, referencehtml)
- -- Check that they are equivalent
- local equivalent = tablecompare.compare(generatedtable, referencetable)
- if #equivalent > 0 then
-
- -- Compute which keys differs
- local differentkeys = tablecompare.diff(generatedtable, referencetable)
- local differentkeysstring = pp.tostring(differentkeys, {line_max=1})
-
- -- Convert table in formatted string
- local xmlformatter = require("xmlformatter")
- local htmlformattedstring= xmlformatter.xmltostring(generatedtable)
- local htmlformattedreferencestring = xmlformatter.xmltostring(referencetable)
-
- -- Create the diff
- local diffclass = java.require("diff.match.patch.diff_match_patch")
- local diff = diffclass:new()
- local differences = diff:diff_main(htmlformattedstring,htmlformattedreferencestring)
- diff:diff_cleanupSemantic(differences)
-
- -- Prettify the result
- local diffutil = java.require("org.eclipse.ldt.lua.tests.internal.utils.DiffUtil")
- local prettydiff = diffutil:diff_pretty_diff(differences)
-
- -- Formalise first table output
- local _ = '_'
- local line = _:rep(80)
- local stringdiff = string.format('%s\nString Diff \n%s\n%s', line, line, prettydiff)
- local generatedhtml = string.format('%s\nGenerated HTML\n%s\n%s', line, line, generatedhtml)
- local referencehtml = string.format('%s\nReference HTML\n%s\n%s', line, line, referencehtml)
- local tablediff = string.format('%s\nTable Diff \n%s\n%s', line, line, differentkeysstring)
- local generatedtable = string.format('%s\nGenerated table\n%s\n%s', line, line, pp.tostring(generatedtable, {line_max=1}))
- local referencetable = string.format('%s\nReference table\n%s\n%s', line, line, pp.tostring(referencetable, {line_max=1}))
- return nil, string.format('The generated HTML is not the same as the reference:\n%s\n%s\n%s\n%s\n%s\n%s',stringdiff, generatedhtml, referencehtml, tablediff, generatedtable, referencetable)
- end
- return true
+ -- Check that they are equivalent
+ local equivalent = tablecompare.compare(generatedtable, referencetable)
+ if #equivalent > 0 then
+
+ -- Compute which keys differ
+ local differentkeys = tablecompare.diff(generatedtable, referencetable)
+ local differentkeysstring = pp.tostring(differentkeys, {line_max=1})
+
+ -- Convert table in formatted string
+ local xmlformatter = require("xmlformatter")
+ local htmlformattedstring= xmlformatter.xmltostring(generatedtable)
+ local htmlformattedreferencestring = xmlformatter.xmltostring(referencetable)
+
+ -- Create the diff
+ local diffclass = java.require("diff.match.patch.diff_match_patch")
+ local diff = diffclass:new()
+ local differences = diff:diff_main(htmlformattedstring,htmlformattedreferencestring)
+ diff:diff_cleanupSemantic(differences)
+
+ -- Prettify the result
+ local diffutil = java.require("org.eclipse.ldt.lua.tests.internal.utils.DiffUtil")
+ local prettydiff = diffutil:diff_pretty_diff(differences)
+
+ -- Formalise first table output
+ local _ = '_'
+ local line = _:rep(80)
+ local stringdiff = string.format('%s\nString Diff \n%s\n%s', line, line, prettydiff)
+ local generatedhtml = string.format('%s\nGenerated HTML\n%s\n%s', line, line, generatedhtml)
+ local referencehtml = string.format('%s\nReference HTML\n%s\n%s', line, line, referencehtml)
+ local tablediff = string.format('%s\nTable Diff \n%s\n%s', line, line, differentkeysstring)
+ local generatedtable = string.format('%s\nGenerated table\n%s\n%s', line, line, pp.tostring(generatedtable, {line_max=1}))
+ local referencetable = string.format('%s\nReference table\n%s\n%s', line, line, pp.tostring(referencetable, {line_max=1}))
+ return nil, string.format('The generated HTML is not the same as the reference:\n%s\n%s\n%s\n%s\n%s\n%s',stringdiff, generatedhtml, referencehtml, tablediff, generatedtable, referencetable)
+ end
+ return true
end
return M
diff --git a/tests/org.eclipse.ldt.lua.tests/lib/xmlformatter.lua b/tests/org.eclipse.ldt.lua.tests/lib/xmlformatter.lua
index 1e35bd9..f8bf70a 100644
--- a/tests/org.eclipse.ldt.lua.tests/lib/xmlformatter.lua
+++ b/tests/org.eclipse.ldt.lua.tests/lib/xmlformatter.lua
@@ -1,113 +1,114 @@
--------------------------------------------------------------------------------
--- Copyright (c) 2012 Sierra Wireless and others.
--- All rights reserved. This program and the accompanying materials
--- are made available under the terms of the Eclipse Public License v1.0
--- which accompanies this distribution, and is available at
--- http://www.eclipse.org/legal/epl-v10.html
---
--- Contributors:
--- Sierra Wireless - initial API and implementation
--------------------------------------------------------------------------------
-
---- @module xmlformatter
-local M = {}
-
-local nodetostring ={}-- a map (key=NODE_TYPE, value=conversion function)
-
--- convert xml to table
-local function xmltotable(domtable,result,indent,indentlevel)
- local f = nodetostring[domtable._type]
- if f then return f(domtable,result,indent,indentlevel) end
- return nil
-end
-
--- print ROOT in table
-local function roottotable(domtable,result,indent,indentlevel)
- for i,child in ipairs(domtable._children) do
- xmltotable(child,result,indent,indentlevel)
- end
- return result
-end
-
--- print ELEMENT in table
-local function elementtotable(domtable,result,indent,indentlevel)
- -- detect if this element has children
- local havechildren = domtable._children and (#(domtable._children) > 0)
-
- -- indent if needed
- if indent then
- result[#result+1] = "\n"
- result[#result+1] = string.rep(" ",indentlevel)
- end
-
- -- open element
- result[#result+1] = "<"
- result[#result+1] = domtable._name
-
- -- print attributes
- if domtable._attr then
- for id, val in pairs(domtable._attr) do
- result[#result+1] = " "
- result[#result+1] = id
- result[#result+1] = "=\""
- result[#result+1] = val
- result[#result+1] = "\""
- end
- end
-
- -- auto close balise if needed
- if havechildren then
- result[#result+1] = ">"
- else
- result[#result+1] = "/>"
- end
-
- -- do not indent in a pre balise
- indent = indent and domtable._name ~= "pre"
- local indentnext = true
-
- -- print children
- if havechildren then
- for i,child in ipairs(domtable._children) do
- xmltotable(child,result,indent and indentnext,indentlevel+1)
- indentnext = child._type ~= "TEXT"
- end
- end
-
- -- close element
- if havechildren then
- if indent and indentnext then
- result[#result+1] = "\n"
- result[#result+1] = string.rep(" ",indentlevel)
- end
- result[#result+1] = "</"
- result[#result+1] = domtable._name
- result[#result+1] = ">"
- end
-
- return result
-end
-
--- print TEXT in table
-local function texttotable(domtable,result,indent,indentlevel)
- -- just print the text
- result[#result+1] = domtable._text
-
- return result
-end
-
-
-nodetostring.ROOT=roottotable
-nodetostring.ELEMENT=elementtotable
-nodetostring.TEXT=texttotable
-
-----------------------------------------------
--- @function [parent=#xmlformatter] xmltostring
--- @param #table domtable return by domhandler
-function M.xmltostring(domtable)
- local result = xmltotable(domtable,{},true,0)
- return table.concat(result);
-end
-
-return M
-
+-------------------------------------------------------------------------------
+-- Copyright (c) 2012 Sierra Wireless and others.
+-- All rights reserved. This program and the accompanying materials
+-- are made available under the terms of the Eclipse Public License v1.0
+-- which accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- Contributors:
+-- Sierra Wireless - initial API and implementation
+-------------------------------------------------------------------------------
+
+--- @module xmlformatter
+local M = {}
+
+local nodetostring ={}-- a map (key=NODE_TYPE, value=conversion function)
+
+-- convert xml to table
+local function xmltotable(domtable,result,indent,indentlevel)
+ local f = nodetostring[domtable._type]
+ if f then return f(domtable,result,indent,indentlevel) end
+ return nil
+end
+
+-- print ROOT in table
+local function roottotable(domtable,result,indent,indentlevel)
+ for i,child in ipairs(domtable._children) do
+ xmltotable(child,result,indent,indentlevel)
+ end
+ return result
+end
+
+-- print ELEMENT in table
+local function elementtotable(domtable,result,indent,indentlevel)
+ -- detect if this element has children
+ local havechildren = domtable._children and (#(domtable._children) > 0)
+
+ -- indent if needed
+ if indent then
+ result[#result+1] = "\n"
+ result[#result+1] = string.rep(" ",indentlevel)
+ end
+
+ -- open element
+ result[#result+1] = "<"
+ result[#result+1] = domtable._name
+
+ -- print attributes
+ if domtable._attr then
+ for id, val in pairs(domtable._attr) do
+ result[#result+1] = " "
+ result[#result+1] = id
+ result[#result+1] = "=\""
+ result[#result+1] = val
+ result[#result+1] = "\""
+ end
+ end
+
+ -- auto-close tag if needed
+ if havechildren then
+ result[#result+1] = ">"
+ else
+ result[#result+1] = "/>"
+ end
+
+ -- do not indent in a pre tag
+ indent = indent and domtable._name ~= "pre"
+ local indentnext = true
+
+ -- print children
+ if havechildren then
+ for i,child in ipairs(domtable._children) do
+ xmltotable(child,result,indent and indentnext,indentlevel+1)
+ indentnext = child._type ~= "TEXT"
+ end
+ end
+
+ -- close element
+ if havechildren then
+ if indent and indentnext then
+ result[#result+1] = "\n"
+ result[#result+1] = string.rep(" ",indentlevel)
+ end
+ result[#result+1] = "</"
+ result[#result+1] = domtable._name
+ result[#result+1] = ">"
+ end
+
+ return result
+end
+
+-- print TEXT in table
+local function texttotable(domtable,result,indent,indentlevel)
+ -- just print the text
+ result[#result+1] = domtable._text
+
+ return result
+end
+
+
+nodetostring.ROOT=roottotable
+nodetostring.ELEMENT=elementtotable
+nodetostring.TEXT=texttotable
+
+----------------------------------------------
+-- @function [parent=#xmlformatter] xmltostring
+-- @param #table domtable return by domhandler
+function M.xmltostring(domtable)
+ local result = xmltotable(domtable,{},true,0)
+ return table.concat(result);
+end
+
+return M
+
+
diff --git a/tests/org.eclipse.ldt.lua.tests/scripts/generatehtml.lua b/tests/org.eclipse.ldt.lua.tests/scripts/generatehtml.lua
index c22bffc..47af9b8 100644
--- a/tests/org.eclipse.ldt.lua.tests/scripts/generatehtml.lua
+++ b/tests/org.eclipse.ldt.lua.tests/scripts/generatehtml.lua
@@ -22,7 +22,7 @@
local modeltransformations = require 'modeltransformations'
local templateengine = require 'templateengine'
for key, value in pairs(require 'template.utils') do
- templateengine.env[key] = value
+ templateengine.env[key] = value
end
if #arg < 1 then
print 'No file to serialize.'
diff --git a/tests/org.eclipse.ldt.lua.tests/scripts/modeltransformations.lua b/tests/org.eclipse.ldt.lua.tests/scripts/modeltransformations.lua
index bd3050a..2da754f 100644
--- a/tests/org.eclipse.ldt.lua.tests/scripts/modeltransformations.lua
+++ b/tests/org.eclipse.ldt.lua.tests/scripts/modeltransformations.lua
@@ -26,48 +26,48 @@
-- @param #function transformationfunction
function M.codetoserialisedmodel(sourcefilepath, resultextension, transformationfunction)
- -- Load file
- local luafile, errormessage = io.open(sourcefilepath, 'r')
- if not luafile then
- return nil, errormessage
- end
- local luasource = luafile:read('*a')
- luafile:close()
+ -- Load file
+ local luafile, errormessage = io.open(sourcefilepath, 'r')
+ if not luafile then
+ return nil, errormessage
+ end
+ local luasource = luafile:read('*a')
+ luafile:close()
- -- Generate AST
- local ast = mlc:src_to_ast( luasource )
- local status, astvalid, errormsg = pcall(compiler.check_ast, ast)
- if not astvalid then
- return nil, string.format('Unable to generate AST for %s.\n%s', sourcefilepath, errormsg)
- end
-
- --Generate model
- local model = transformationfunction(ast)
+ -- Generate AST
+ local ast = mlc:src_to_ast( luasource )
+ local status, astvalid, errormsg = pcall(compiler.check_ast, ast)
+ if not astvalid then
+ return nil, string.format('Unable to generate AST for %s.\n%s', sourcefilepath, errormsg)
+ end
- -- Strip functions
- model = tablecompare.stripfunctions( model )
-
- -- Serialize model
- local serializedcode = serializer.dump( model, {comment = true} )
-
- -- Beautify serialized model
- local beautifulserializedcode, error = tabledumpbeautifier.prettify(serializedcode)
- if not beautifulserializedcode then
- print(string.format("Unable to prettify serialized code.\n%s", error))
- beautifulserializedcode = serializedcode
- end
+ --Generate model
+ local model = transformationfunction(ast)
- -- Define file name
- local extreplacement = table.concat({'%1.', resultextension})--string.format('\%1.%s', resultextension)
- local serializedfilename = sourcefilepath:gsub('([%w%-_/\\]+)%.lua$', extreplacement)
+ -- Strip functions
+ model = tablecompare.stripfunctions( model )
- -- Save serialized model
- local serializefile = assert(io.open(serializedfilename, 'w'))
- serializefile:write( beautifulserializedcode )
- serializefile:close()
+ -- Serialize model
+ local serializedcode = serializer.dump( model, {comment = true} )
- -- This a success
- print( string.format('%s serialized to %s.', sourcefilepath, serializedfilename) )
- return true
+ -- Beautify serialized model
+ local beautifulserializedcode, error = tabledumpbeautifier.prettify(serializedcode)
+ if not beautifulserializedcode then
+ print(string.format("Unable to prettify serialized code.\n%s", error))
+ beautifulserializedcode = serializedcode
+ end
+
+ -- Define file name
+ local extreplacement = table.concat({'%1.', resultextension})--string.format('\%1.%s', resultextension)
+ local serializedfilename = sourcefilepath:gsub('([%w%-_/\\]+)%.lua$', extreplacement)
+
+ -- Save serialized model
+ local serializefile = assert(io.open(serializedfilename, 'w'))
+ serializefile:write( beautifulserializedcode )
+ serializefile:close()
+
+ -- This a success
+ print( string.format('%s serialized to %s.', sourcefilepath, serializedfilename) )
+ return true
end
return M
diff --git a/tests/org.eclipse.ldt.lua.tests/scripts/serializeapimodel.lua b/tests/org.eclipse.ldt.lua.tests/scripts/serializeapimodel.lua
index 6c57e27..53e1e2a 100644
--- a/tests/org.eclipse.ldt.lua.tests/scripts/serializeapimodel.lua
+++ b/tests/org.eclipse.ldt.lua.tests/scripts/serializeapimodel.lua
@@ -21,19 +21,19 @@
local apimodelbuiler = require 'models.apimodelbuilder'
local modeltransformations = require 'modeltransformations'
if #arg < 1 then
- print 'No file to serialize.'
- return
+ print 'No file to serialize.'
+ return
end
for k = 1, #arg do
- -- Load source to serialize
- local filename = arg[k]
- local status, error = modeltransformations.codetoserialisedmodel(
- filename,
- 'serialized',
- apimodelbuiler.createmoduleapi
- )
- if not status then
- print( error )
- end
+ -- Load source to serialize
+ local filename = arg[k]
+ local status, error = modeltransformations.codetoserialisedmodel(
+ filename,
+ 'serialized',
+ apimodelbuiler.createmoduleapi
+ )
+ if not status then
+ print( error )
+ end
end
diff --git a/tests/org.eclipse.ldt.lua.tests/scripts/serializebothmodel.lua b/tests/org.eclipse.ldt.lua.tests/scripts/serializebothmodel.lua
index 0a1bf7e..fb5ddf3 100644
--- a/tests/org.eclipse.ldt.lua.tests/scripts/serializebothmodel.lua
+++ b/tests/org.eclipse.ldt.lua.tests/scripts/serializebothmodel.lua
@@ -24,38 +24,38 @@
local tablecompare = require 'tablecompare'
if #arg < 1 then
- print 'No file to serialize.'
- return
+ print 'No file to serialize.'
+ return
end
for k = 1, #arg do
- -- Load source to serialize
- local filename = arg[k]
- local status, error = modeltransformations.codetoserialisedmodel(
- filename,
- 'serialized',
- function (ast)
- --
- -- Generate API model
- --
- local apimodel, comment2apiobj = apimodelbuilder.createmoduleapi(ast)
-
- --
- -- Generate internal model
- --
- local internalmodel = internalmodelbuilder.createinternalcontent(ast,apimodel,comment2apiobj, 'modulename')
-
- --
- -- create table with the two models
- --
- local luasourceroot = {}
- luasourceroot.fileapi=apimodel
- luasourceroot.internalcontent=internalmodel
-
- return luasourceroot
- end
- )
- if not status then
- print( error )
- end
+ -- Load source to serialize
+ local filename = arg[k]
+ local status, error = modeltransformations.codetoserialisedmodel(
+ filename,
+ 'serialized',
+ function (ast)
+ --
+ -- Generate API model
+ --
+ local apimodel, comment2apiobj = apimodelbuilder.createmoduleapi(ast)
+
+ --
+ -- Generate internal model
+ --
+ local internalmodel = internalmodelbuilder.createinternalcontent(ast,apimodel,comment2apiobj, 'modulename')
+
+ --
+ -- create table with the two models
+ --
+ local luasourceroot = {}
+ luasourceroot.fileapi=apimodel
+ luasourceroot.internalcontent=internalmodel
+
+ return luasourceroot
+ end
+ )
+ if not status then
+ print( error )
+ end
end
diff --git a/tests/org.eclipse.ldt.lua.tests/scripts/serializeinternalmodel.lua b/tests/org.eclipse.ldt.lua.tests/scripts/serializeinternalmodel.lua
index 7f4baee..2e97e58 100644
--- a/tests/org.eclipse.ldt.lua.tests/scripts/serializeinternalmodel.lua
+++ b/tests/org.eclipse.ldt.lua.tests/scripts/serializeinternalmodel.lua
@@ -21,19 +21,19 @@
local internalmodelbuilder = require 'models.internalmodelbuilder'
local modeltransformations = require 'modeltransformations'
if #arg < 1 then
- print 'No file to serialize.'
- return
+ print 'No file to serialize.'
+ return
end
for k = 1, #arg do
- -- Load source to serialize
- local filename = arg[k]
- local status, error = modeltransformations.codetoserialisedmodel(
- filename,
- 'serialized.lua',
- internalmodelbuilder.createinternalcontent
- )
- if not status then
- print( error )
- end
+ -- Load source to serialize
+ local filename = arg[k]
+ local status, error = modeltransformations.codetoserialisedmodel(
+ filename,
+ 'serialized.lua',
+ internalmodelbuilder.createinternalcontent
+ )
+ if not status then
+ print( error )
+ end
end