From faaceedf0009e662ff1a78683a97656a04337587 Mon Sep 17 00:00:00 2001 From: Matej Sychra Date: Tue, 21 Feb 2023 16:00:47 +0100 Subject: [PATCH] workarounds and cleaning unsupported stuff --- .../{checkmarx.yml => checkmarx.ym_} | 8 +- .gitmodules | 8 - base | 2 +- builders/arduino-docker-build | 2 +- builders/lua-inspect | 1 - builders/lua-inspect/CHANGES.txt | 212 +++ builders/lua-inspect/COPYRIGHT | 28 + builders/lua-inspect/COPYRIGHT-extman | 23 + builders/lua-inspect/COPYRIGHT-jquery | 20 + builders/lua-inspect/README.txt | 183 ++ builders/lua-inspect/examples.lua | 202 +++ builders/lua-inspect/extman/extman.lua | 896 ++++++++++ .../lua-inspect/extman/scite_lua/bit.luax | 241 +++ .../lua-inspect/extman/scite_lua/borland.lua | 58 + .../lua-inspect/extman/scite_lua/ctagsdx.lua | 214 +++ .../extman/scite_lua/luainspect.lua | 8 + .../lua-inspect/extman/scite_lua/prompt.lua | 91 + .../extman/scite_lua/select_block.lua | 33 + .../extman/scite_lua/select_string.lua | 25 + .../extman/scite_lua/switch_buffers.lua | 31 + .../extman/scite_lua/switch_headers.lua | 37 + .../lua-inspect/htmllib/jquery-1.4.2.min.js | 154 ++ builders/lua-inspect/htmllib/luainspect.css | 33 + builders/lua-inspect/htmllib/luainspect.js | 66 + builders/lua-inspect/lib/luainspect/ast.lua | 929 ++++++++++ .../lua-inspect/lib/luainspect/command.lua | 85 + .../lua-inspect/lib/luainspect/compat_env.lua | 390 ++++ .../lua-inspect/lib/luainspect/delimited.lua | 46 + builders/lua-inspect/lib/luainspect/dump.lua | 90 + .../lua-inspect/lib/luainspect/globals.lua | 216 +++ builders/lua-inspect/lib/luainspect/html.lua | 101 ++ builders/lua-inspect/lib/luainspect/init.lua | 1431 +++++++++++++++ builders/lua-inspect/lib/luainspect/scite.lua | 1591 +++++++++++++++++ .../lua-inspect/lib/luainspect/signatures.lua | 433 +++++ .../lua-inspect/lib/luainspect/typecheck.lua | 40 + builders/lua-inspect/lib/luainspect/types.lua | 130 ++ builders/lua-inspect/luainspect | 17 + builders/lua-inspect/metalualib/LICENSE | 35 
+ builders/lua-inspect/metalualib/README.TXT | 397 ++++ builders/lua-inspect/metalualib/gg.lua | 748 ++++++++ builders/lua-inspect/metalualib/lexer.lua | 513 ++++++ .../lua-inspect/metalualib/metalua/base.lua | 107 ++ .../metalualib/metalua/runtime.lua | 3 + .../metalualib/metalua/string2.lua | 44 + .../lua-inspect/metalualib/metalua/table2.lua | 372 ++++ builders/lua-inspect/metalualib/mlp_expr.lua | 204 +++ builders/lua-inspect/metalualib/mlp_ext.lua | 89 + builders/lua-inspect/metalualib/mlp_lexer.lua | 32 + builders/lua-inspect/metalualib/mlp_meta.lua | 118 ++ builders/lua-inspect/metalualib/mlp_misc.lua | 185 ++ builders/lua-inspect/metalualib/mlp_stat.lua | 221 +++ builders/lua-inspect/metalualib/mlp_table.lua | 92 + builders/lua-inspect/test.lua | 7 + builders/micropython-docker-build | 2 +- builders/mongoose-docker-build | 2 +- builders/nodemcu-docker-build | 2 +- builders/nodemcu-firmware | 1 - builders/platformio-docker-build | 2 +- 58 files changed, 11234 insertions(+), 17 deletions(-) rename .github/workflows/{checkmarx.yml => checkmarx.ym_} (75%) delete mode 160000 builders/lua-inspect create mode 100644 builders/lua-inspect/CHANGES.txt create mode 100644 builders/lua-inspect/COPYRIGHT create mode 100644 builders/lua-inspect/COPYRIGHT-extman create mode 100644 builders/lua-inspect/COPYRIGHT-jquery create mode 100644 builders/lua-inspect/README.txt create mode 100644 builders/lua-inspect/examples.lua create mode 100644 builders/lua-inspect/extman/extman.lua create mode 100644 builders/lua-inspect/extman/scite_lua/bit.luax create mode 100644 builders/lua-inspect/extman/scite_lua/borland.lua create mode 100644 builders/lua-inspect/extman/scite_lua/ctagsdx.lua create mode 100644 builders/lua-inspect/extman/scite_lua/luainspect.lua create mode 100644 builders/lua-inspect/extman/scite_lua/prompt.lua create mode 100644 builders/lua-inspect/extman/scite_lua/select_block.lua create mode 100644 builders/lua-inspect/extman/scite_lua/select_string.lua create mode 
100644 builders/lua-inspect/extman/scite_lua/switch_buffers.lua create mode 100644 builders/lua-inspect/extman/scite_lua/switch_headers.lua create mode 100644 builders/lua-inspect/htmllib/jquery-1.4.2.min.js create mode 100644 builders/lua-inspect/htmllib/luainspect.css create mode 100644 builders/lua-inspect/htmllib/luainspect.js create mode 100644 builders/lua-inspect/lib/luainspect/ast.lua create mode 100755 builders/lua-inspect/lib/luainspect/command.lua create mode 100644 builders/lua-inspect/lib/luainspect/compat_env.lua create mode 100644 builders/lua-inspect/lib/luainspect/delimited.lua create mode 100644 builders/lua-inspect/lib/luainspect/dump.lua create mode 100644 builders/lua-inspect/lib/luainspect/globals.lua create mode 100644 builders/lua-inspect/lib/luainspect/html.lua create mode 100644 builders/lua-inspect/lib/luainspect/init.lua create mode 100644 builders/lua-inspect/lib/luainspect/scite.lua create mode 100644 builders/lua-inspect/lib/luainspect/signatures.lua create mode 100644 builders/lua-inspect/lib/luainspect/typecheck.lua create mode 100644 builders/lua-inspect/lib/luainspect/types.lua create mode 100755 builders/lua-inspect/luainspect create mode 100644 builders/lua-inspect/metalualib/LICENSE create mode 100644 builders/lua-inspect/metalualib/README.TXT create mode 100644 builders/lua-inspect/metalualib/gg.lua create mode 100644 builders/lua-inspect/metalualib/lexer.lua create mode 100644 builders/lua-inspect/metalualib/metalua/base.lua create mode 100644 builders/lua-inspect/metalualib/metalua/runtime.lua create mode 100644 builders/lua-inspect/metalualib/metalua/string2.lua create mode 100644 builders/lua-inspect/metalualib/metalua/table2.lua create mode 100644 builders/lua-inspect/metalualib/mlp_expr.lua create mode 100644 builders/lua-inspect/metalualib/mlp_ext.lua create mode 100644 builders/lua-inspect/metalualib/mlp_lexer.lua create mode 100644 builders/lua-inspect/metalualib/mlp_meta.lua create mode 100644 
builders/lua-inspect/metalualib/mlp_misc.lua create mode 100644 builders/lua-inspect/metalualib/mlp_stat.lua create mode 100644 builders/lua-inspect/metalualib/mlp_table.lua create mode 100755 builders/lua-inspect/test.lua delete mode 160000 builders/nodemcu-firmware diff --git a/.github/workflows/checkmarx.yml b/.github/workflows/checkmarx.ym_ similarity index 75% rename from .github/workflows/checkmarx.yml rename to .github/workflows/checkmarx.ym_ index 43a851dae..c0364e5df 100644 --- a/.github/workflows/checkmarx.yml +++ b/.github/workflows/checkmarx.ym_ @@ -25,4 +25,10 @@ jobs: steps: - name: Checkmarx AST Github Action - uses: Checkmarx/ast-github-action@2.0.14 \ No newline at end of file + uses: Checkmarx/ast-github-action@2.0.14 + + with: + base_uri: https://ast.checkmarx.net/ + cx_tenant: nfr_nfr_ast_corpus + cx_client_id: ${{ secrets.CX_CLIENT_ID }} + cx_client_secret: ${{ secrets.CX_CLIENT_SECRET }} \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index 52d87f312..e3b4d2c87 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,10 +2,6 @@ path = builders/arduino-docker-build url = https://github.com/suculent/arduino-docker-build.git -[submodule "builders/lua-inspect"] - path = builders/lua-inspect - url = https://github.com/davidm/lua-inspect.git - [submodule "builders/micropython-docker-build"] path = builders/micropython-docker-build url = https://github.com/suculent/micropython-docker-build.git @@ -18,10 +14,6 @@ path = builders/nodemcu-docker-build url = https://github.com/suculent/nodemcu-docker-build.git -[submodule "builders/nodemcu-firmware"] - path = builders/nodemcu-firmware - url = https://github.com/suculent/nodemcu-firmware.git - [submodule "builders/platformio-docker-build"] path = builders/platformio-docker-build url = https://github.com/suculent/platformio-docker-build.git diff --git a/base b/base index ff0266d1b..38e2e232a 160000 --- a/base +++ b/base @@ -1 +1 @@ -Subproject commit ff0266d1bed5327f65e6217dfed881359b487565 
+Subproject commit 38e2e232a25f20429c400ff499e24b18fb81b794 diff --git a/builders/arduino-docker-build b/builders/arduino-docker-build index b8a280322..8a05944e1 160000 --- a/builders/arduino-docker-build +++ b/builders/arduino-docker-build @@ -1 +1 @@ -Subproject commit b8a28032233f24a3c7399031ca226e035f27e13f +Subproject commit 8a05944e1d3d2182246f0f46540b112ee917adde diff --git a/builders/lua-inspect b/builders/lua-inspect deleted file mode 160000 index 901900890..000000000 --- a/builders/lua-inspect +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 901900890e8ff8fa104ef93f9aca587e723bef9c diff --git a/builders/lua-inspect/CHANGES.txt b/builders/lua-inspect/CHANGES.txt new file mode 100644 index 000000000..3f9771cc2 --- /dev/null +++ b/builders/lua-inspect/CHANGES.txt @@ -0,0 +1,212 @@ +Change Log. + +20120127 + [*] core: cleanup error messages in inferred values. + +20120126 + [+] Ignore locals named '_' in unused/masking variable reporting. + +20111224 + [+] html/delimited: export type information (in same manner as SciTE) + [*] html: improve line number CSS treatment. e.g. don't include in copy/paste + [+] html: highlight range of lines of scope of selected variable (like in SciTE). + [+] command: add 'luainspect' front-end script in top directory. + [+] command: add options for output name and html library path + +20100911 + [+] core: infer types of for loop variables. + +20100827 + [+] core: infer sets involving functions with multiple returns. + e.g. local a,b = (function() return 1,2 end)() + [!] core:fix: do not infer table sets on LuaInspect types. + +20100825 + [*] SciTE: simplify install (use default path) + [!] core: fix: function params should infer to unknown values + [!] core: fix: infer: unknown functions return unknown values + +20100823 + [*] SciTE: change Ctrl-Alt-W to Ctrl-Alt-E + [!] SciTE: fix bookmarking (Ctrl+F2) + [+] SciTE: bundle copy of extman.lua + +20100821 + [+!] core: return analysis enabled following fixes + +20100820 + [!] 
SciTE: fix folding performance problem (though folding still disabled by default + due to OnStyle recursion problem) + +20100819 + [!] core: fix tokenlist when opcode operands reversed lexically + [*] metalua/performance - avoid overriding builtin pairs/ipairs + [*] SciTE: plugin now loaded as Lua extension script (not globally). + +20100818 + [!] HTML: fix missing chars at end-of-file + [!] Metalua: fix lexer line number count off-by-one error + [!] SciTE: fix Unicode/UTF-8 encoding breaking formatting + [!] core: fix performance problem with tinsertlist function + [!] core/performance: cleanup invalidated_code function + +20100817 + [!] core: fix keyword token recognition problems + [!] core: skip inspection on require loops + [+] core: infer function return values (temporarily disabled) + [+] core: detect dead-code (temporarily disabled) + [*] core: internal refactoring (ast.valueknown) + +20100816 + core: make reporting optional + metalua: patches to metalua lineinfo + (was corrupting HTML output and SciTE highlighting) + +20100814 + core: add basic type inferences (e.g. number+number -> number) + +20100813 + core: inspect required modules too + (e.g. enables use of imported function signatures) + core/SciTE: add list all warnings command (SciTE: Ctrl+Alt+W lists, and F4 iterates them) + +20100811 + SciTE: autocomplete functions arguments when cursor after '(' + core: fix signatures for os/debug libraries + core/SciTE: display function argument list or helpinfo for variables + SciTE: Ctrl+Alt+I changed to Ctrl+Alt+B to avoid conflict with + SciTE 2.20 incremental search + +20100810 + SciTE: improved "inspect variable" command, supports browsing nested tables. + SciTE: split luainspect.autocomplete property into two properties + SciTE: add autocomplete function + SciTE: autocomplete table fields. + +20100809 + core/SciTE: add function argument count check + core/SciTE: jump to definition now supports functions in different files. 
+ core/SciTE/HTML: improvements to displaying masking/masked lexicals. + core/SciTE: add command to just to previous statement + core/SciTE: preliminary variable autocomplete support + (luainspect.autocomplete currently disabled by default) + SciTE: add missing style.script_lua.local_param_mutate style. + +20100807 + SciTE: Add luainspect.path.append/luainspect.cpath.append properties + to append to package.path/cpath + SciTE: Add custom searcher function to locate modules in same path as current buffer. + SciTE: Added "force reinspect" command to force full reinspection of code. + Note: this will also attempt to unload any modules loaded by previous inspection. + SciTE: Improve luainspect.update.delay to delay inspection for given tick count + following user typing. Also displays blue '+' marker when inspection has been delayed. + +20100806 + SciTE: jump to uses, not jumps to exact position, not just line number + SciTE: mark lines of invalidated code upon introducing code errors and display + error message below invalidated code (not on exact line of error) + SciTE: add styling delay option to improve performance (luainspect.update.delay) + SciTE: preliminary auto-complete typing support (luainspect.autocomplete) + (experimental and currently off by default) + +20100805 + core: Major internal refactoring to simplify incremental compilation + (lineinfo managed in tokenlist). Breaks API. + core/SciTE/HTML: identifies local variables that mask other locals (same name): + e.g. local x=1; local x=2 (strikethrough) + core: added version number variable APIVERSION to luainspect.init. + HTML: highlight keywords in selected block + SciTE: the incremental compilation feature is now on by default. + +20100803 + core:Evaluate special comments (prefixed by '!') to inject semantic information into analysis + (similar to luaanalyze). + core: Further work on incremental compilation feature. 
+ +20100802 + core: improve field value inferences + SciTE: improve dark style clarity + SciTE: make margin markers for variable scope and block mutually exclusive + +20100731 + SciTE: allow styles in properties to be specified by name and more flexibly overridden. + SciTE: add optional dark style + SciTE/HTML: support mutate upvalues, cleanup styles + SciTE: improve keyword highlighting (always highlight containing block) + +20100730 + core: fix scoping of `for` statements (in globals.lua) + core/SciTE: highlight keywords and show all keywords in selected statement. + +20100729 + SciTE: options can now be set with SciTE properties. + SciTE: refactor: select statement + core/SciTE: more work on incremental compilation (luainspect.incremental.compilation) + +20100728 + core/SciTE: add command to select statement or comment containing current cursor selection. + core/SciTE: experimental incremental compilation option (ALLOW_INCREMENTAL_COMPILATION) + core/SciTE: add special styling (background color) for tab whitespace + +20100727 + SciTE: Fix limited styling range may skip styling (broke in 20100726) + +20100726 + SciTE: apply default styles in script if not specified in properties file. + SciTE: initial implementation of folding (but currently disabled due to SciTE problems) + SciTE: improve OnStyle only over provided byte range + Note: you may now remove LuaInspect styles from your properties file. + +20100725 + SciTE: fix memory overflow when code contains buffer.notes. 
+ +20100724 + SciTE: list all uses of selected variable (currently locals only) + SciTE: display errors about mismatched blocks or parens at both top and bottom of problem + SciTE: support shebang line + +20100723 + core/SciTE/HTML: Initial support for table fields + core/SciTE: initial dynamic value determination + core: fix recursive local scoping (`Localrec) in globals.lua + SciTE: Mark all range of selected variable's scope in margin + SciTE: New command to rename all occurrences of selected variable + SciTE: Significant performance gain utilizing loadstring in addition + to metalua libraries + SciTE: Mark upvalues (lighter blue) + SciTE: Fix handling multiple buffers. + SciTE: display variable info on double click + SciTE: display real-time annotations of all local variables, like a Mathcad worksheet + (experimental feature via ANNOTATE_ALL_LOCALS) + SciTE: jump (goto) definition of selected variable (currently locals only) + ctagsdx.lua from the full SciteExtMan is optional (allows "goto mark" command + to return to previous location following a "go to definition"). + SciTE: add command to inspect table contents. 
+ Note: SciTE*.properties and luainspect.css have been updated; please update when upgrading + +20100720 + core: support for detecting unused locals (white on blue) + SciTE: display callinfo help on top-level standard library globals + SciTE: display local parameters distinctly (dark blue) + SciTE: display compiler errors as annotations + SciTE: partial workaround for conflict with other lexers + SciTE: option to recompile only when cursor line number changes to improve performance + and reduce error reporting (set UPDATE_ALWAYS to true in scite.lua to enable this) + SciTE: workaround for Metalua libraries sometimes not returning line number in error report + Note: SciTE*.properties and luainspect.css have been updated; please update when upgrading + +20100719 + core: Fixed "repeat" statement scope handling (globals.lua) + SciTE: Improve performance (not recompile when code not changing) + SciTE: Add "!" marker near compiler error. + SciTE: Add hotspots on local variables + +20100717-2 + SciTE: highlight all instances of selected identifier + Now requires http://lua-users.org/wiki/SciteExtMan + +20100717 + added initial SciTE text editor plugin + +20100622 + initial version with HTML output diff --git a/builders/lua-inspect/COPYRIGHT b/builders/lua-inspect/COPYRIGHT new file mode 100644 index 000000000..29b297bc8 --- /dev/null +++ b/builders/lua-inspect/COPYRIGHT @@ -0,0 +1,28 @@ +LuaInspect License + +Copyright (C) 2010 David Manura + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or 
substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +=============================================================================== + +Uses Metalua libraries (see metalualib/LICENSE). +Uses jquery (see COPYRIGHT-jquery) +Uses ExtMan (see COPYRIGHT-extman) + diff --git a/builders/lua-inspect/COPYRIGHT-extman b/builders/lua-inspect/COPYRIGHT-extman new file mode 100644 index 000000000..688c6d372 --- /dev/null +++ b/builders/lua-inspect/COPYRIGHT-extman @@ -0,0 +1,23 @@ +ExtMan License + +Copyright (C) 2004-2010 Steve Donovan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +=============================================================================== diff --git a/builders/lua-inspect/COPYRIGHT-jquery b/builders/lua-inspect/COPYRIGHT-jquery new file mode 100644 index 000000000..ea336914d --- /dev/null +++ b/builders/lua-inspect/COPYRIGHT-jquery @@ -0,0 +1,20 @@ +Copyright (c) 2010 John Resig, http://jquery.com/ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/builders/lua-inspect/README.txt b/builders/lua-inspect/README.txt new file mode 100644 index 000000000..6f78d4e54 --- /dev/null +++ b/builders/lua-inspect/README.txt @@ -0,0 +1,183 @@ +LuaInspect - LuaInspect is a tool that does Lua code analysis. 
+It includes an extensive plugin for the SciTE [1] text editor, +there is also a plugin for the VIM editor [2], and it includes +an export to DHTML as well. + +== Project Page == + +For further details, see http://lua-users.org/wiki/LuaInspect . + +== Status == + +WARNING: Some of this code might not yet be stable or complete, +particularly with regards to inferencing. It is usable for daily code editing +but you may need to sometimes fix things yourself. Many additional +features could be added too. + +== Features == + + * analysis: + * identifies global (red) and local variables (blue), including locals that are + function arguments (dark blue) and upvalues (light blue) + * identifies unused local variables: e.g. `do local x=1 end` (white-on-blue) + * identifies local variables masking other locals (same name): e.g. `local x=1; local x=2` + (strikethrough and squiggle line) + * identifies local variables that have non-constant binding (`local x = 1; x = 2`) (italic) + * identifies unknown global variables (white-on-red) and table fields (red), inferred by + static and dynamic evaluation. + * infers values of variables (e.g. `local sum = math.pi + 2` is 5.14. + and defined-ness of members of imported modules + (`local mt = require "math"; math.sqrtt(2) -- undefined`) + * infers signatures of functions (including local, global, and module functions) + * checks number of function arguments against signatures + * cross-references variables (locals and module fields) with their definitions and uses + (pink highlight), identifies range of lines/scope where the local is defined + and (SciTE only) supports jump-to-definition and jump-to-uses + * identifies all keywords in selected block (underline) + * evaluate special comments (prefixed by '!') to inject semantic information into analysis + (similar to luaanalyze / lint). + * basic type inferences (e.g. number + number -> number) + * infer function return values (e.g. 
`function f(x) if x then return 1,2,3 else return 1,3,'z' end end` + returns 1, number, unknown). + * detect dead-code (e.g. `do return end dead()`) (SciTE only) (diagonal hatching) + * refactoring: + * command to rename all occurrences of selected variable (SciTE only) + * browsing: + * inspect members of selected table. + * select statement or comment containing current cursor selection (SciTE only) + * display real-time annotations of all local variables, like an Excel/Mathcad worksheet + (experimental feature via ANNOTATE_ALL_LOCALS) (currently SciTE only) + * auto-complete typing support (SciTE only) (experimental) + * interfaces: SciTE plugin, VIM plugin, and HTML output. + +== Files in this directory == + +metalualib/* - Copy of Metalua libraries. + Based on http://github.com/fab13n/metalua/tree/fcee97b8d0091ceb471902ee457dbccaab98234e + with a few bug fixes (search for "PATCHED:LuaInspect" in the source). +lib/* - LuaInspect libraries. +htmllib/* - HTML resources under here. +extman/* - SciTE extman. + Recent version compatible with LuaInspect. + +== Command-line Usage (HTML output) == + +Example: + + $ ./luainspect -fhtml -lhtmllib examples.lua > examples.html + +(Alternately just run "lua test.lua". You should also do "lua luainspect" +rather than "./luainspect" on Windows.) + +You will need to ensure that the JavaScript and CSS files in the +path after the "-l" argument can be found relative to the HTML file; +otherwise, the page will not display properly. + +== Command-line Usage (delimited CSV output) == + +Example: + + $ ./luainspect -fdelimited examples.lua > examples.csv + +== Installation in SciTE == + +First install SciTE . +Version 2.12 and 2.20 work (older versions might not work). + +The simple way to install LuaInspect into SciTE is to just place the +"luainspect" folder inside the same folder where your SciTE binary is +installed and add the following line to one of your SciTE properties +files (e.g. 
SciTEGlobal.properties or SciTEUser.properties -- consult +the SciTE documentation for where these are located): + + ext.lua.startup.script=$(SciteDefaultHome)/luainspect/extman/extman.lua + +That normally is all you need to do. + +If you placed LuaInspect somewhere else or are using your own version +of SciTE ExtMan (extman.lua), you will need to adjust the above to +reference the absolute path where extman.lua is installed. LuaInspect +includes its own copy of SciTE ExtMan +, and it's recommended to use +the included version because older versions might not work +properly. The files in the scite_lua subfolder are not strictly +necessary but are suggested. In particularly, scite_lua/luainspect.lua +allows ExtMan to find LuaInspect, and you will need to adjust this if +you move LuaInspect somewhere else relative to ExtMan. + +Dependencies: + Tested with SciTE version 2.12/2.20 (older versions might not work). + Requires http://lua-users.org/wiki/SciteExtMan (version included). + Note: ExtMan's ctagsdx.lua is recommended (allows "goto mark" + command to return to previous location following a "go to + definition" or "show all variable uses"). + +If you want to customize styles, add the contents of the +`light_styles` or `dark_styles` variable in the scite.lua file to a +SciTE properties file. + +== Configuring SciTE options == + +The following LuaInspect options can be configured in one of your +SciTE properties files: + + luainspect.update.always (0 or 1, default 1) + luainspect.delay.count (integer >= 1, default 5) + luainspect.annotate.all.locals (0 or 1, default 0) + luainspect.incremental.compilation (0 or 1, default 1) + luainspect.performance.tests (0 or 1, default 0) + luainspect.autocomplete.vars (0 or 1, default 0) + luainspect.autocomplete.syntax (0 or 1, default 0) + luainspect.path.append (string, default '') + luainspect.cpath.append (string, default '') + style.script_lua.scheme (string, '' or 'dark', default '') + +For details, see scite.lua. 
+ +== Installation on VIM == + +See [2] for VIM editor support. + +== Preliminary support for luaanalyze style comments == + +To make all variables in scope match name 'ast$' be recognized by LuaInspect as a +table with field 'tag' of type string, add this to your code: + + --! context.apply_value('ast$', {tag=''}) + +The LuaInspect code itself uses this: + + --! require 'luainspect.typecheck' (context) + +== Design Notes == + +The font styles are intended to make the more dangerous +or questionable code stand out more. + +Local variables named '_' are ignored for purposes of unused/masking variable +reporting. Typical use case: `for _, v in ipairs(t) do <. . .> end`. + +== LICENSE == + +See COPYRIGHT file. + +== Credits == + +David Manura, original author. +Steve Donovan for discussions on design, SciTE and ExtMan. +Fabien Fleutot for Metalua and discussions. +SciTE suggestions/fixes by Tymur Gubayev. +Peter Odding for VIM editor support [2]. +Jon Akhtar - csv output and IntelliJ discussions. + +== Bugs == + +Please report bugs via github +or just "dee em dot el you ae at em ae tee ayche two dot ow ar gee", or +if you prefer neither then append to the wiki page +. + +== References == + +[1] http://www.scintilla.org/SciTE.html +[2] http://peterodding.com/code/vim/lua-inspect/ - VIM editor support diff --git a/builders/lua-inspect/examples.lua b/builders/lua-inspect/examples.lua new file mode 100644 index 000000000..b1ec63d6a --- /dev/null +++ b/builders/lua-inspect/examples.lua @@ -0,0 +1,202 @@ +-- some examples/tests. 
-*- coding: utf-8 -*- + +local mt = require "math" + +-- unicode test (this should not break highlighting) +do print("Δ™«»∂≈") end -- Δ™«»∂≈ + +-- Basic variable scope and usage tests +local unused_local = 1 +local used_local = 2; print(used_local) +local reassigned_local = 1; reassigned_local = 2 +local upval_local; function f() return upval_local end +local reassigned_upval_local; function f() reassigned_upval_local = 2 end +function f(param_unused_local, param_used_local, param_reassigned_local, param_upval_local, param_reassigned_upval_local) + print(param_used_local) + param_reassigned_local = 2 + return function() + print(param_upval_local) + param_reassigned_upval_local = 2 + end +end +print(undefined_global) +print(math) -- predefined global +print(defined_global); defined_global = 2; print(defined_global) + +-- Scope tests for specific statements +do local local1; for local1=local1,2 do end end -- used, unused+mask, used local +do local local1; for local1 in local1 do end end -- used, unused+mask, used local +do local local1; local local1 = local1 end -- used, unused+mask, used local +do local function local1() local1() end end -- used, used local +do local local1; local local1 = function() local1() end end -- used, unused+mask, used local +do -- test repeat-until + local local1 -- unused local + repeat + local local1 -- unused local+mask + local local1 -- used local+mask + until local1 -- used local +end +do -- test local var scope stays inside block + repeat local v2 until false + while false do local v3 end + for v4=1,1 do local v5 end + for v6 in nil do local v6 end + print(v2, v3, v4, v5, v6) -- undefined globals +end +do -- more masking testss + local abc,abc -- not mask, mask + local function bcd(bcd, abc, cde) local bcd end -- not mask, mask, mask, mask, not mask + for cde, cde in pairs{} do local cde end -- not mask, mask, mask + for def=1,2 do local def end -- not mask, mask + function abc:def() local self end -- not mask, mask + function 
abc:def() local self end -- not mask, mask + function abc:def(self) end -- not mask, mask +end +for _,x in ipairs{} do local _,x = function(_,x)end end -- ignore unused/masking '_' + +-- Field accesses +math.sqrt(math.pi) -- statically+dynamically defined fields +math["sqrt"](2) -- statically+dynamically defined field (this works too) +math.undefinedfield(math.pii) +_G.math.sqrt(2) -- dynamically defined (IMPROVE? statically defined too) +_=package.loaded -- statically+dynamically defined field +_=package.loaded.math -- dynamically defined field, deeply nested +_=package.loaded.undefinedfield +local root = math.sqrt; root(2) -- IMPROVE: statically define +math:sqrt(2) -- statically+dynamically defined invoke (although non-sensical - IMPROVE?) +math:undefmethod(2) +local t = {x=1, {y={z=2}}, f = function() end} +print(t.forwarddeclared(), t.undef()) -- recognized (forward declared), unrecognized +function t.forwarddeclared() end -- define +t.y = t.x, t[1].y.z, t[1].y.undef + t.undef, t.f().undef --OK? +;("abc"):upper():lower() -- dynamically defined (IMPROVE? statically defined too) +local m = math; local mm = {sqrt=math.sqrt}; print(m.sqrt, mm.sqrt, math.sqrt) --OK? + +-- infer values +local pi = math.pi -- 3.14... +local a1 = math.fmod(12, 10) == 2 -- true (safe function) + +-- more value inferences +local loc1 = 3 +loc1=4 +print(loc1) -- IMPROVE? infer value even though binding mutable? + +-- luaanalyze style comments. +--! context.apply_value('shape$', {color='?', size={x=1,y=1}, f=function() return "?" end}) +function f(myshape) print(myshape.color, myshape.size.x, myshape.undef, myshape.f():len()) end +--IMPROVE: `len` above + +-- Argument count checks. +function zero() end +function one(a) end +function two(a,b) end +function oneplus(a,...) end +function zeroplus(...) 
end +zero() zero(1) zero(1,2) +one() one(1) one(1,2) +one(f()) one(1,zero()) one(1,2,zero()) +two() two() two(1,2) +oneplus() oneplus(1) oneplus(1,2) oneplus(1,2,3) +zeroplus() +math.sqrt(1) math.sqrt(1,2) _G.math.sqrt(1,2) +local sq = math.sqrt +sq(1,2) +function f(...) + one(...) one(1, ...) one(1, 2, ...) +end +local tt = {zero=zero,one=one, more={one=one}} -- test methods +tt:zero() tt:zero(1) +tt:one() tt:one(1) +tt.more:one() tt.more:one(1) + +-- return values (instructions: inspect `fa`) +local function fa() end -- no returns +local function fa() return nil end -- returns nil +local function fa() return 2 end -- return 2 +local function fa(x,y) return 2,x>y end -- return 2, 'boolean' (FIX:returns 2,'unknown') +local function fa(x) if x then return 1,2,3 else return 1,3,'z',nil end return 'z' end + -- returns 1, number, unknown, unknown (note deadcode) +local function fa(x) if x then return 2 end end -- returns unknown (due to implicit return) +local function fa(x) do return 2 end return 3 end -- returns 2 (note deadcode) +local function fa(x) return (function() return 2 end)()+1 end -- returns 3 +local function fa(x) return x end -- return unknown +local x1 = fa(5) -- unknown + -- note: "infer 5" is not implemented (i.e. return values specific + -- to function call arguments) It could infer, however, + -- that fa is a "safe function" to execute. +local function fa(...) return ... 
end --FIX +local function fa(f) return 2,f() end --FIX + --TODO: multiple returns not inferred + +-- expression lists from function returns +local a1,a1 = (function() return 1,2 end)() -- 1,2 +local function zero() end +local function one() return 'a' end +local function two() return 'a', 'b' end +local a1, a2 = zero() -- nil, nil +local a1, a2 = one() -- 'a', nil +local a1, a2, a3 = two() -- 'a', 'b', nil +local a1, a2, a3 = two(), 'c' -- 'a', 'c', nil +local a1, a2, a3, a4 = 'z', two() -- 'z', 'a', 'b', nil +ga1, ga2, ga3, ga4 = 'z', two() -- 'z', 'a', 'b', nil (global sets) +local tt = {}; tt.ga1, tt.ga2, tt.ga3, tt.ga4 = 'z', two() -- 'z', 'a', 'b', nil (index sets) +local a1, a2, a3 = two(), unknownfunc() -- 'a', unknown, unknown +math.atan2(function() return 2, 3 end) -- FIX: arg count ok +math.atan2(function() return 2, 'x' end) -- FIX: arg type mismatch +math.atan2(unknownfunc()) -- FIX: arg count could be ok +math.atan2(1,2, unknownfunc()) -- FIX: arg count could be ok + +-- deadcode detection +local deadcode +local function f(x) + if false then deadcode() + elseif 0==1 then deadcode() deadcode() + elseif 1==1 then print 'ok' + while 0==1 do deadcode() end + do return end + deadcode() if x then end while 1 do end + else + deadcode() + end +end +--test: do return end deadcode() + +-- error messages +do + local n + local z1,z2 = true,false + local xd1 = z1 + z2 -- error, arithmetic on boolean + local xd2 = true + 5 -- error, arithmetic on boolean literal + local xd3 = n^n -- error, arithmetic on nil + local xd4 = z1.zz -- error, index bool + local xd4b = z1:zz() -- error, index bool in meth call + local xd5 = #z1 -- error, len of bool + local xd6 = g11 + g22 -- error, arithmetic on global nil +end + +-- type inferences +do + local n1, n2 --! context.apply_value('^n.*', number) + local s1, s2 --! context.apply_value('^s.*', string) + local b1, b2 --! 
context.apply_value('^b.*', boolean) + local x1,y1 = n1+n2, n1+2 -- number + local x2,y2 = n1 or n2, n1 or 2 -- number + local x3,y3 = n1 > n2, n1 > 2 -- boolean + local x4,y4 = -n1, -2 -- number, -2 + local x5,y5 = not n1, not 2 -- boolean, false + local xb1,yb1 = s1+s2, s1+"z" -- number + local xb2,yb2 = s1 or s2, s1 or "z" -- string + local xb3,yb3 = s1 .. s2, s1 .. "z" -- string + local xb4,yb4 = s1 > s2, s1 > "z" -- boolean + local xc1,yc1 = b1 and b2, b1 and true -- boolean + local e1,ey1 = #n1, #2 -- error + local e2,ey2 = -b1, -true -- error + local e3,ey3 = #b1, #true -- error + local xd1 = n1+n2^2 * n2 or 4 -- number + local xe1 = math.sqrt(n1) -- number + local xe2 = math:sqrt() -- number (although nonsensical) + for ii=1,10 do print(ii) end -- number + for a1,a2,a3 in ipairs(t) do print(a1,a2,a3) end -- number, unknown, nil + for a1,a2,a3 in pairs(t) do print(a1,a2,a3) end -- unknown, unknown, nil + for a1,a2,a3 in it(t) do print(a1,a2,a3) end -- unknown, unknown, unknown +end diff --git a/builders/lua-inspect/extman/extman.lua b/builders/lua-inspect/extman/extman.lua new file mode 100644 index 000000000..68b3e8eb6 --- /dev/null +++ b/builders/lua-inspect/extman/extman.lua @@ -0,0 +1,896 @@ +-- Extman is a Lua script manager for SciTE. It enables multiple scripts to capture standard events +-- without interfering with each other. For instance, scite_OnDoubleClick() will register handlers +-- for scripts that need to know when a double-click event has happened. (To know whether it +-- was in the output or editor pane, just test editor.Focus). It provides a useful function scite_Command +-- which allows you to define new commands without messing around with property files (see the +-- examples in the scite_lua directory.) 
+-- extman defines three new convenience handlers as well: +--scite_OnWord (called when user has entered a word) +--scite_OnEditorLine (called when a line is entered into the editor) +--scite_OnOutputLine (called when a line is entered into the output pane) + +-- this is an opportunity for you to make regular Lua packages available to SciTE +--~ package.path = package.path..';C:\\lang\\lua\\lua\\?.lua' +--~ package.cpath = package.cpath..';c:\\lang\\lua\\?.dll' + +package.cpath = package.cpath..';c:\\lua\\clibs\\?.dll' + + + +-- useful function for getting a property, or a default if not present. +function scite_GetProp(key,default) + local val = props[key] + if val and val ~= '' then return val + else return default end +end + +function scite_GetPropBool(key,default) + local res = scite_GetProp(key,default) + if not res or res == '0' or res == 'false' then return false + else return true + end +end + +local GTK = scite_GetProp('PLAT_GTK') + +local _MarginClick,_DoubleClick,_SavePointLeft = {},{},{} +local _SavePointReached,_Open,_SwitchFile = {},{},{} +local _BeforeSave,_Save,_Char = {},{},{} +local _Word,_LineEd,_LineOut = {},{},{} +local _OpenSwitch = {} +local _UpdateUI = {} +local _UserListSelection +-- new with 1.74! +local _Key = {} +local _DwellStart = {} +local _Close = {} +-- new +local _remove = {} +local append = table.insert +local find = string.find +local size = table.getn +local sub = string.sub +local gsub = string.gsub + + +-- file must be quoted if it contains spaces! 
+function quote_if_needed(target) + local quote = '"' + if find(target,'%s') and sub(target,1,1) ~= quote then + target = quote..target..quote + end + return target +end + +function OnUserListSelection(tp,str) + if _UserListSelection then + local callback = _UserListSelection + _UserListSelection = nil + return callback(str) + else return false end +end + +local function DispatchOne(handlers,arg) + for i,handler in pairs(handlers) do + local fn = handler + if _remove[fn] then + handlers[i] = nil + _remove[fn] = nil + end + local ret = fn(arg) + if ret then return ret end + end + return false +end + +local function Dispatch4(handlers,arg1,arg2,arg3,arg4) + for i,handler in pairs(handlers) do + local fn = handler + if _remove[fn] then + handlers[i] = nil + _remove[fn] = nil + end + local ret = fn(arg1,arg2,arg3,arg4) + if ret then return ret end + end + return false +end + +DoDispatchOne = DispatchOne -- export this! + +-- these are the standard SciTE Lua callbacks - we use them to call installed extman handlers! 
+function OnMarginClick() + return DispatchOne(_MarginClick) +end + +function OnDoubleClick() + return DispatchOne(_DoubleClick) +end + +function OnSavePointLeft() + return DispatchOne(_SavePointLeft) +end + +function OnSavePointReached() + return DispatchOne(_SavePointReached) +end + +function OnChar(ch) + return DispatchOne(_Char,ch) +end + +function OnSave(file) + return DispatchOne(_Save,file) +end + +function OnBeforeSave(file) + return DispatchOne(_BeforeSave,file) +end + +function OnSwitchFile(file) + return DispatchOne(_SwitchFile,file) +end + +function OnOpen(file) + return DispatchOne(_Open,file) +end + +function OnUpdateUI() + if editor.Focus then + return DispatchOne(_UpdateUI) + else + return false + end +end + +-- new with 1.74 +function OnKey(key,shift,ctrl,alt) + return Dispatch4(_Key,key,shift,ctrl,alt) +end + +function OnDwellStart(pos,s) + return Dispatch4(_DwellStart,pos,s) +end + +function OnClose() + return DispatchOne(_Close) +end + +-- may optionally ask that this handler be immediately +-- removed after it's called +local function append_unique(tbl,fn,rem) + local once_only + if type(fn) == 'string' then + once_only = fn == 'once' + fn = rem + rem = nil + if once_only then + _remove[fn] = fn + end + else + _remove[fn] = nil + end + local idx + for i,handler in pairs(tbl) do + if handler == fn then idx = i; break end + end + if idx then + if rem then + table.remove(tbl,idx) + end + else + if not rem then + append(tbl,fn) + end + end +end +ex_append_unique = append_unique + +-- this is how you register your own handlers with extman +function scite_OnMarginClick(fn,rem) + append_unique(_MarginClick,fn,rem) +end + +function scite_OnDoubleClick(fn,rem) + append_unique(_DoubleClick,fn,rem) +end + +function scite_OnSavePointLeft(fn,rem) + append_unique(_SavePointLeft,fn,rem) +end + +function scite_OnSavePointReached(fn,rem) + append_unique(_SavePointReached,fn,rem) +end + +function scite_OnOpen(fn,rem) + append_unique(_Open,fn,rem) +end + 
+function scite_OnSwitchFile(fn,rem) + append_unique(_SwitchFile,fn,rem) +end + +function scite_OnBeforeSave(fn,rem) + append_unique(_BeforeSave,fn,rem) +end + +function scite_OnSave(fn,rem) + append_unique(_Save,fn,rem) +end + +function scite_OnUpdateUI(fn,rem) + append_unique(_UpdateUI,fn,rem) +end + +function scite_OnChar(fn,rem) + append_unique(_Char,fn,rem) +end + +function scite_OnOpenSwitch(fn,rem) + append_unique(_OpenSwitch,fn,rem) +end + +--new 1.74 +function scite_OnKey(fn,rem) + append_unique(_Key,fn,rem) +end + +function scite_OnDwellStart(fn,rem) + append_unique(_DwellStart,fn,rem) +end + +function scite_OnClose(fn,rem) + append_unique(_Close,fn,rem) +end + +local function buffer_switch(f) +--- OnOpen() is also called if we move to a new folder + if not find(f,'[\\/]$') then + DispatchOne(_OpenSwitch,f) + end +end + +scite_OnOpen(buffer_switch) +scite_OnSwitchFile(buffer_switch) + +local next_user_id = 13 -- arbitrary + +-- the handler is always reset! +function scite_UserListShow(list,start,fn) + local separators = {' ', ';', '@', '?', '~', ':'} + local separator + local s = table.concat(list) + for i, sep in ipairs(separators) do + if not string.find(s, sep, 1, true) then + s = table.concat(list, sep, start) + separator = sep + break + end + end + -- we could not find a good separator, set it arbitrarily + if not separator then + separator = '@' + s = table.concat(list, separator, start) + end + _UserListSelection = fn + local pane = editor + if not pane.Focus then pane = output end + pane.AutoCSeparator = string.byte(separator) + pane:UserListShow(next_user_id,s) + pane.AutoCSeparator = string.byte(' ') + return true +end + + local word_start,in_word,current_word +-- (Nicolas) this is in Ascii as SciTE always passes chars in this "encoding" to OnChar +local wordchars = '[A-Za-zÀ-Ýà-ÿ]' -- wuz %w + + local function on_word_char(s) + if not in_word then + if find(s,wordchars) then + -- we have hit a word! 
+ word_start = editor.CurrentPos + in_word = true + current_word = s + end + else -- we're in a word + -- and it's another word character, so collect + if find(s,wordchars) then + current_word = current_word..s + else + -- leaving a word; call the handler + local word_end = editor.CurrentPos + DispatchOne(_Word, {word=current_word, + startp=word_start,endp=editor.CurrentPos, + ch = s + }) + in_word = false + end + end + -- don't interfere with usual processing! + return false + end + +function scite_OnWord(fn,rem) + append_unique(_Word,fn,rem) + if not rem then + scite_OnChar(on_word_char) + else + scite_OnChar(on_word_char,'remove') + end +end + +local last_pos = 0 + +function get_line(pane,lineno) + if not pane then pane = editor end + if not lineno then + local line_pos = pane.CurrentPos + lineno = pane:LineFromPosition(line_pos)-1 + end + -- strip linefeeds (Windows is a special case as usual!) + local endl = 2 + if pane.EOLMode == 0 then endl = 3 end + local line = pane:GetLine(lineno) + if not line then return nil end + return string.sub(line,1,-endl) +end + +-- export this useful function... +scite_Line = get_line + +local function on_line_char(ch,was_output) + if ch == '\n' then + local in_editor = editor.Focus + if in_editor and not was_output then + DispatchOne(_LineEd,get_line(editor)) + return false -- DO NOT interfere with any editor processing! 
+ elseif not in_editor and was_output then + DispatchOne(_LineOut,get_line(output)) + return true -- prevent SciTE from trying to evaluate the line + end + end + return false +end + +local function on_line_editor_char(ch) + return on_line_char(ch,false) +end + +local function on_line_output_char(ch) + return on_line_char(ch,true) +end + +local function set_line_handler(fn,rem,handler,on_char) + append_unique(handler,fn,rem) + if not rem then + scite_OnChar(on_char) + else + scite_OnChar(on_char,'remove') + end +end + +function scite_OnEditorLine(fn,rem) + set_line_handler(fn,rem,_LineEd,on_line_editor_char) +end + +-- with this scheme, there is a primary handler, and secondary prompt handlers +-- can temporarily take charge of input. There is only one prompt in charge +-- at any particular time, however. +local primary_handler + +function scite_OnOutputLine(fn,rem) + if not rem then + if not primary_handler then primary_handler = fn end + end + _LineOut = {} + set_line_handler(fn,rem,_LineOut,on_line_output_char) + if rem and fn ~= primary_handler then + set_line_handler(primary_handler,false,_LineOut,on_line_output_char) + end +end + +local path_pattern +local tempfile +local dirsep + +if GTK then + tempfile = '/tmp/.scite-temp-files' + path_pattern = '(.*)/[^%./]+%.%w+$' + dirsep = '/' +else + tempfile = os.getenv 'TMP' .. '\\scite_temp1' + path_pattern = '(.*)[\\/][^%.\\/]+%.%w+$' + dirsep = '\\' +end + +function path_of(s) + local _,_,res = find(s,path_pattern) + if _ then return res else return s end +end + +local extman_path = path_of(props['ext.lua.startup.script']) +local lua_path = scite_GetProp('ext.lua.directory',extman_path..dirsep..'scite_lua') +props['ext.lua.directory'] = lua_path:gsub('[\\/]$', '') + +fn,err = package.loadlib(extman_path.."/gui.dll","luaopen_gui") +if fn then fn() else + --DISABLED:print(err) +end + + +function extman_Path() + return extman_path +end + +-- this version of scite-gdb uses the new spawner extension library. 
+local fn,err,spawner_path +if package then loadlib = package.loadlib end +-- by default, the spawner lib sits next to extman.lua +spawner_path = scite_GetProp('spawner.extension.path',extman_path) +if GTK then + fn,err = loadlib(spawner_path..'/unix-spawner-ex.so','luaopen_spawner') +else + fn,err = loadlib(spawner_path..'\\spawner-ex.dll','luaopen_spawner') +end +if fn then + fn() -- register spawner +else + --DISABLED: print('cannot load spawner '..err) +end + +-- a general popen function that uses the spawner library if found; otherwise falls back +-- on os.execute +function scite_Popen(cmd) + if spawner then + return spawner.popen(cmd) + else + cmd = cmd..' > '..tempfile + if GTK then -- io.popen is dodgy; don't use it! + os.execute(cmd) + else + if Execute then -- scite_other was found! + Execute(cmd) + else + os.execute(cmd) + end + end + return io.open(tempfile) + end +end + +function dirmask(mask,isdir) + local attrib = '' + if isdir then + if not GTK then + attrib = ' /A:D ' + else + attrib = ' -F ' + end + end + if not GTK then + mask = gsub(mask,'/','\\') + return 'dir /b '..attrib..quote_if_needed(mask) + else + return 'ls -1 '..attrib..quote_if_needed(mask) + end +end + +-- p = globtopattern(g) +-- +-- Converts glob string (g) into Lua pattern string (p). +-- Always succeeds. +-- +-- Warning: could be better tested. +-- +-- (c) 2008 D.Manura, Licensed under the same terms as Lua (MIT License). +local function globtopattern(g) + -- Some useful references: + -- - apr_fnmatch in Apache APR. For example, + -- http://apr.apache.org/docs/apr/1.3/group__apr__fnmatch.html + -- which cites POSIX 1003.2-1992, section B.6. + + local p = "^" -- pattern being built + local i = 0 -- index in g + local c -- char at index i in g. 
+ + -- unescape glob char + local function unescape() + if c == '\\' then + i = i + 1; c = g:sub(i,i) + if c == '' then + p = '[^]' + return false + end + end + return true + end + + -- escape pattern char + local function escape(c) + return c:match("^%w$") and c or '%' .. c + end + + -- Convert tokens at end of charset. + local function charset_end() + while 1 do + if c == '' then + p = '[^]' + break + elseif c == ']' then + p = p .. ']' + break + else + if not unescape() then break end + local c1 = c + i = i + 1; c = g:sub(i,i) + if c == '' then + p = '[^]' + break + elseif c == '-' then + i = i + 1; c = g:sub(i,i) + if c == '' then + p = '[^]' + break + elseif c == ']' then + p = p .. escape(c1) .. '%-]' + break + else + if not unescape() then break end + p = p .. escape(c1) .. '-' .. escape(c) + end + elseif c == ']' then + p = p .. escape(c1) .. ']' + break + else + p = p .. escape(c1) + i = i - 1 -- put back + end + end + i = i + 1; c = g:sub(i,i) + end + end + + -- Convert tokens in charset. + local function charset() + p = p .. '[' + i = i + 1; c = g:sub(i,i) + if c == '' or c == ']' then + p = p .. '[^]' + elseif c == '^' or c == '!' then + p = p .. '^' + i = i + 1; c = g:sub(i,i) + if c == ']' then + -- ignored + else + charset_end() + end + else + charset_end() + end + end + + -- Convert tokens. + while 1 do + i = i + 1; c = g:sub(i,i) + if c == '' then + p = p .. '$' + break + elseif c == '?' then + p = p .. '.' + elseif c == '*' then + p = p .. '.*' + elseif c == '[' then + charset() + elseif c == '\\' then + i = i + 1; c = g:sub(i,i) + if c == '' then + p = p .. '\\$' + break + end + p = p .. escape(c) + else + p = p .. escape(c) + end + end + return p +end + +-- grab all files matching @mask, which is assumed to be a path with a wildcard. +-- 2008-06-27 Now uses David Manura's globtopattern(), which is not fooled by cases +-- like test.lua and test.lua~ ! 
+function scite_Files(mask) + local f,path,pat,cmd,_ + if not GTK then + cmd = dirmask(mask) + path = mask:match('(.*\\)') or '.\\' + local file = mask:match('([^\\]*)$') + pat = globtopattern(file) + else + cmd = 'ls -1 '..mask + path = '' + end + f = scite_Popen(cmd) + local files = {} + if not f then return files end + + for line in f:lines() do + if not pat or line:match(pat) then + append(files,path..line) + end + end + f:close() + return files +end + +-- grab all directories in @path, excluding anything that matches @exclude_path +-- As a special exception, will also any directory called 'examples' ;) +function scite_Directories(path,exclude_pat) + local cmd + --print(path) + if not GTK then + cmd = dirmask(path..'\\*.',true) + else + cmd = dirmask(path,true) + end + path = path..dirsep + local f = scite_Popen(cmd) + local files = {} + if not f then return files end + for line in f:lines() do +-- print(line) + if GTK then + if line:sub(-1,-1) == dirsep then + line = line:sub(1,-2) + else + line = nil + end + end + if line and not line:find(exclude_pat) and line ~= 'examples' then + append(files,path..line) + end + end + f:close() + return files +end + +function scite_FileExists(f) + local f = io.open(f) + if not f then return false + else + f:close() + return true + end +end + +function scite_CurrentFile() + return props['FilePath'] +end + +-- (Nicolas) +if GTK then + function scite_DirectoryExists(path) + local result = os.execute('test -d "'..path..'"') + if result == -1 then return true end -- FIX: why this return -1 on SciTE 2.2.5/Ubuntu? + return result == 0 + end +else + -- what is the Win32 equivalent?? 
+ function scite_DirectoryExists(path) + return true + end +end + +function split(s,delim) + res = {} + while true do + p = find(s,delim) + if not p then + append(res,s) + return res + end + append(res,sub(s,1,p-1)) + s = sub(s,p+1) + end +end + +function splitv(s,delim) + return unpack(split(s,delim)) +end + +local idx = 10 +local shortcuts_used = {} +local alt_letter_map = {} +local alt_letter_map_init = false +local name_id_map = {} + +local function set_command(name,cmd,mode) + local _,_,pattern,md = find(mode,'(.+){(.+)}') + if not _ then + pattern = mode + md = 'savebefore:no' + end + local which = '.'..idx..pattern + props['command.name'..which] = name + props['command'..which] = cmd + props['command.subsystem'..which] = '3' + props['command.mode'..which] = md + name_id_map[name] = 1100+idx + return which +end + +local function check_gtk_alt_shortcut(shortcut,name) + -- Alt+ shortcuts don't work for GTK, so handle them directly... + local _,_,letter = shortcut:find('Alt%+([A-Z])$') + if _ then + alt_letter_map[letter:lower()] = name + if not alt_letter_map_init then + alt_letter_map_init = true + scite_OnKey(function(key,shift,ctrl,alt) + if alt and key < 255 then + local ch = string.char(key) + if alt_letter_map[ch] then + scite_MenuCommand(alt_letter_map[ch]) + end + end + end) + end + end +end + +local function set_shortcut(shortcut,name,which) + if shortcut == 'Context' then + local usr = 'user.context.menu' + if props[usr] == '' then -- force a separator + props[usr] = '|' + end + props[usr] = props[usr]..'|'..name..'|'..(1100+idx)..'|' + else + local cmd = shortcuts_used[shortcut] + if cmd then + print('Error: shortcut already used in "'..cmd..'"') + else + shortcuts_used[shortcut] = name + if GTK then check_gtk_alt_shortcut(shortcut,name) end + props['command.shortcut'..which] = shortcut + end + end +end + +-- allows you to bind given Lua functions to shortcut keys +-- without messing around in the properties files! 
+-- Either a string or a table of strings; the string format is either +-- menu text|Lua command|shortcut +-- or +-- menu text|Lua command|mode|shortcut +-- where 'mode' is the file extension which this command applies to, +-- e.g. 'lua' or 'c', optionally followed by {mode specifier}, where 'mode specifier' +-- is the same as documented under 'command.mode' +-- 'shortcut' can be a usual SciTE key specifier, like 'Alt+R' or 'Ctrl+Shift+F1', +-- _or_ it can be 'Context', meaning that the menu item should also be added +-- to the right-hand click context menu. +function scite_Command(tbl) + if type(tbl) == 'string' then + tbl = {tbl} + end + for i,v in pairs(tbl) do + local name,cmd,mode,shortcut = splitv(v,'|') + if not shortcut then + shortcut = mode + mode = '.*' + else + mode = '.'..mode + end + -- has this command been defined before? + local old_idx = 0 + for ii = 10,idx do + if props['command.name.'..ii..mode] == name then old_idx = ii end + end + if old_idx == 0 then + local which = set_command(name,cmd,mode) + if shortcut then + set_shortcut(shortcut,name,which) + end + idx = idx + 1 + end + end +end + + +-- use this to launch Lua Tool menu commands directly by name +-- (commands are not guaranteed to work properly if you just call the Lua function) +function scite_MenuCommand(cmd) + if type(cmd) == 'string' then + cmd = name_id_map[cmd] + if not cmd then return end + end + scite.MenuCommand(cmd) +end + +local loaded = {} +local current_filepath + +-- this will quietly fail.... 
+local function silent_dofile(f) + if scite_FileExists(f) then + if not loaded[f] then + dofile(f) + loaded[f] = true + end + return true + end + return false +end + +function scite_dofile(f) + f = extman_path..'/'..f + silent_dofile(f) +end + +function scite_require(f) + local path = lua_path..dirsep..f + if not silent_dofile(path) then + silent_dofile(current_filepath..dirsep..f) + end +end + +if not GTK then + scite_dofile 'scite_other.lua' +end + +if not scite_DirectoryExists(lua_path) then + print('Error: directory '..lua_path..' not found') + return +end + +function load_script_list(script_list,path) + if not script_list then + print('Error: no files found in '..path) + else + current_filepath = path + for i,file in pairs(script_list) do + silent_dofile(file) + end + end +end + +-- Load all scripts in the lua_path (usually 'scite_lua'), including within any subdirectories +-- that aren't 'examples' or begin with a '_' +local script_list = scite_Files(lua_path..dirsep..'*.lua') +load_script_list(script_list,lua_path) +local dirs = scite_Directories(lua_path,'^_') +for i,dir in ipairs(dirs) do + load_script_list(scite_Files(dir..dirsep..'*.lua'),dir) +end + +function scite_WordAtPos(pos) + if not pos then pos = editor.CurrentPos end + local p2 = editor:WordEndPosition(pos,true) + local p1 = editor:WordStartPosition(pos,true) + if p2 > p1 then + return editor:textrange(p1,p2) + end +end + +function scite_GetSelOrWord() + local s = editor:GetSelText() + if s == '' then + return scite_WordAtPos() + else + return s + end +end + +--~ scite_Command 'Reload Script|reload_script|Shift+Ctrl+R' + +--~ function reload_script() +--~ current_file = scite_CurrentFile() +--~ print('Reloading... 
'..current_file) +--~ loaded[current_file] = false +--~ silent_dofile(current_file) +--~ end + +--~ require"remdebug.engine" +--~ remdebug.engine.start() + diff --git a/builders/lua-inspect/extman/scite_lua/bit.luax b/builders/lua-inspect/extman/scite_lua/bit.luax new file mode 100644 index 000000000..692abbca4 --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/bit.luax @@ -0,0 +1,241 @@ +--[[--------------- +LuaBit v0.3 +------------------- +a bitwise operation lib for lua. + +http://luaforge.net/projects/bit/ + +Under the MIT license. + +copyright(c) 2006 hanzhao (abrash_han@hotmail.com) +--]]--------------- + +do + +------------------------ +-- bit lib implementions + +local function check_int(n) + -- checking not float + if(n - math.floor(n) > 0) then + error("trying to use bitwise operation on non-integer!") + end +end + +local function to_bits(n) + check_int(n) + if(n < 0) then + -- negative + return to_bits(bit.bnot(math.abs(n)) + 1) + end + -- to bits table + local tbl = {} + local cnt = 1 + while (n > 0) do + local last = math.mod(n,2) + if(last == 1) then + tbl[cnt] = 1 + else + tbl[cnt] = 0 + end + n = (n-last)/2 + cnt = cnt + 1 + end + + return tbl +end + +local function tbl_to_number(tbl) + local n = table.getn(tbl) + + local rslt = 0 + local power = 1 + for i = 1, n do + rslt = rslt + tbl[i]*power + power = power*2 + end + + return rslt +end + +local function expand(tbl_m, tbl_n) + local big = {} + local small = {} + if(table.getn(tbl_m) > table.getn(tbl_n)) then + big = tbl_m + small = tbl_n + else + big = tbl_n + small = tbl_m + end + -- expand small + for i = table.getn(small) + 1, table.getn(big) do + small[i] = 0 + end + +end + +local function bit_or(m, n) + local tbl_m = to_bits(m) + local tbl_n = to_bits(n) + expand(tbl_m, tbl_n) + + local tbl = {} + local rslt = math.max(table.getn(tbl_m), table.getn(tbl_n)) + for i = 1, rslt do + if(tbl_m[i]== 0 and tbl_n[i] == 0) then + tbl[i] = 0 + else + tbl[i] = 1 + end + end + + return 
tbl_to_number(tbl) +end + +local function bit_and(m, n) + local tbl_m = to_bits(m) + local tbl_n = to_bits(n) + expand(tbl_m, tbl_n) + + local tbl = {} + local rslt = math.max(table.getn(tbl_m), table.getn(tbl_n)) + for i = 1, rslt do + if(tbl_m[i]== 0 or tbl_n[i] == 0) then + tbl[i] = 0 + else + tbl[i] = 1 + end + end + + return tbl_to_number(tbl) +end + +local function bit_not(n) + + local tbl = to_bits(n) + local size = math.max(table.getn(tbl), 32) + for i = 1, size do + if(tbl[i] == 1) then + tbl[i] = 0 + else + tbl[i] = 1 + end + end + return tbl_to_number(tbl) +end + +local function bit_xor(m, n) + local tbl_m = to_bits(m) + local tbl_n = to_bits(n) + expand(tbl_m, tbl_n) + + local tbl = {} + local rslt = math.max(table.getn(tbl_m), table.getn(tbl_n)) + for i = 1, rslt do + if(tbl_m[i] ~= tbl_n[i]) then + tbl[i] = 1 + else + tbl[i] = 0 + end + end + + --table.foreach(tbl, print) + + return tbl_to_number(tbl) +end + +local function bit_rshift(n, bits) + check_int(n) + + local high_bit = 0 + if(n < 0) then + -- negative + n = bit_not(math.abs(n)) + 1 + high_bit = 2147483648 -- 0x80000000 + end + + for i=1, bits do + n = n/2 + n = bit_or(math.floor(n), high_bit) + end + return math.floor(n) +end + +-- logic rightshift assures zero filling shift +local function bit_logic_rshift(n, bits) + check_int(n) + if(n < 0) then + -- negative + n = bit_not(math.abs(n)) + 1 + end + for i=1, bits do + n = n/2 + end + return math.floor(n) +end + +local function bit_lshift(n, bits) + check_int(n) + + if(n < 0) then + -- negative + n = bit_not(math.abs(n)) + 1 + end + + for i=1, bits do + n = n*2 + end + return bit_and(n, 4294967295) -- 0xFFFFFFFF +end + +local function bit_xor2(m, n) + local rhs = bit_or(bit_not(m), bit_not(n)) + local lhs = bit_or(m, n) + local rslt = bit_and(lhs, rhs) + return rslt +end + +-------------------- +-- bit lib interface + +bit = { + -- bit operations + bnot = bit_not, + band = bit_and, + bor = bit_or, + bxor = bit_xor, + brshift = bit_rshift, + 
blshift = bit_lshift, + bxor2 = bit_xor2, + blogic_rshift = bit_logic_rshift, + + -- utility func + tobits = to_bits, + tonumb = tbl_to_number, +} + +end + +--[[ +for i = 1, 100 do + for j = 1, 100 do + if(bit.bxor(i, j) ~= bit.bxor2(i, j)) then + error("bit.xor failed.") + end + end +end +--]] + + + + + + + + + + + + + diff --git a/builders/lua-inspect/extman/scite_lua/borland.lua b/builders/lua-inspect/extman/scite_lua/borland.lua new file mode 100644 index 000000000..e58b647e8 --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/borland.lua @@ -0,0 +1,58 @@ +-- demonstrates how to capture multiple key sequences, like 'ctrl-k 1', with extman. +-- This is used to implement Borland-style markers. +scite_Command { + 'ctrl-k|do_ctrl_command k|Ctrl+K', + 'ctrl-q|do_ctrl_command q|Ctrl+Q', +} + +local gMarksMap = {} +local gMarks = {} + +scite_OnOpen(function(f) + gMarksMap[f] = {} + end) + +scite_OnSwitchFile(function(f) + gMarks = gMarksMap[f] +end) + +function current_line() + return editor:LineFromPosition(editor.CurrentPos)+1 +end + +local markers_defined = false +local base = 9 + +function define_markers() + local zero = string.byte('0') + for i = 1,9 do + editor:MarkerDefine(i+base,SC_MARK_CHARACTER + zero + i) + end + markers_defined = true +end + +function do_ctrl_command(key) + editor:BeginUndoAction() + scite_OnChar('once',function (ch) + editor:EndUndoAction() + editor:Undo() + local num = tonumber(ch) + local mark = num and gMarks[num] + local line = current_line() + if key == 'k' and num then + if not markers_defined then define_markers() end + if mark then -- clear mark + gMarks[num] = nil + editor:MarkerDelete(line-1,num+base) + else + gMarks[num] = line + editor:MarkerAdd(line-1,num+base) + print 'mark' + end + elseif key == 'q' and mark then + editor:GotoLine(mark-1) + if ctags_center_pos then ctags_center_pos(mark-1) end + end + return true + end) +end diff --git a/builders/lua-inspect/extman/scite_lua/ctagsdx.lua 
b/builders/lua-inspect/extman/scite_lua/ctagsdx.lua new file mode 100644 index 000000000..a0fab0ca8 --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/ctagsdx.lua @@ -0,0 +1,214 @@ +-- browse a tags database from SciTE! +-- Set this property: +-- ctags.path.cxx= +-- 1. Multiple tags are handled correctly; a drop-down +-- list is presented +-- 2. There is a full stack of marks available. +-- 3. If ctags.path.cxx is not defined, will try to find a tags file in the current dir. + +scite_Command { + 'Find Tag|find_ctag $(CurrentWord)|Ctrl+.', + 'Go to Mark|goto_mark|Alt+.', + 'Set Mark|set_mark|Ctrl+\'', + 'Select from Mark|select_mark|Ctrl+/', +} + +local gMarkStack = {} +local sizeof = table.getn +local push = table.insert +local pop = table.remove +local top = function(s) return s[sizeof(s)] end + +-- this centers the cursor position +-- easy enough to make it optional! +function ctags_center_pos(line) + if not line then + line = editor:LineFromPosition(editor.CurrentPos) + end + local top = editor.FirstVisibleLine + local middle = top + editor.LinesOnScreen/2 + editor:LineScroll(0,line - middle) +end + +local function open_file(file,line,was_pos) + scite.Open(file) + if not was_pos then + editor:GotoLine(line) + ctags_center_pos(line) + else + editor:GotoPos(line) + ctags_center_pos() + end +end + +function set_mark() + push(gMarkStack,{file=props['FilePath'],pos=editor.CurrentPos}) +end + +function goto_mark() + local mark = pop(gMarkStack) + if mark then + open_file(mark.file,mark.pos,true) + end +end + +function select_mark() +local mark = top(gMarkStack) +print (mark) +if mark then + local p1 = mark.pos + local p2 = editor.CurrentPos + print(p1..','..p2) + editor:SetSel(p1,p2) + end +end + +local find = string.find + +local function extract_path(path) +-- given a full path, find the directory part + local s1,s2 = find(path,'/[^/]+$') + if not s1 then -- try backslashes! 
+ s1,s2 = find(path,'\\[^\\]+$') + end + if s1 then + return string.sub(path,1,s1-1) + else + return nil + end +end + +local function ReadTagFile(file) + local f = io.open(file) + if not f then return nil end + local tags = {} + -- now we can pick up the tags! + for line in f:lines() do + -- skip if line is comment + if find(line,'^[^!]') then + local _,_,tag = find(line,'^([^\t]+)\t') + local existing_line = tags[tag] + if not existing_line then + tags[tag] = line..'@' + else + tags[tag] = existing_line..'@'..line + end + end + end + return tags +end + +local gTagFile +local tags + +local function OpenTag(tag) + -- ask SciTE to open the file + local file_name = tag.file + local path = extract_path(gTagFile) + if path then file_name = path..'/'..file_name end + set_mark() + scite.Open(file_name) + -- depending on what kind of tag, either search for the pattern, + -- or go to the line. + local pattern = tag.pattern + if type(pattern) == 'string' then + local p1 = editor:findtext(pattern) + if p1 then + editor:GotoPos(p1) + ctags_center_pos() + end + else + local tag_line = pattern + editor:GotoLine(tag_line) + ctags_center_pos(tag_line) + end +end + +function locate_tags(dir) +--function test(dir) + local filefound = nil + local slash, f + _,_,slash = string.find(dir,"([/\\])") + while dir do + file = dir .. slash .. "tags" + --print ( "---" .. 
file) + f = io.open(file) + if f then + filefound = file + break + end + _,_,dir = string.find(dir,"(.+)[/\\][^/\\]+$") + --print(dir) + end + return filefound +end + +function find_ctag(f,partial) + -- search for tags files first + local result + result = props['ctags.path.cxx'] + if not result then + result = locate_tags(props['FileDir']) + end + if not result then + print("No tags found!") + return + end + if result ~= gTagFile then + --print("Reloading tag from:"..result) + gTagFile = result + tags = ReadTagFile(gTagFile) + end + if partial then + result = '' + for tag,val in tags do + if find(tag,f) then + result = result..val..'@' + end + end + else + result = tags[f] + end + + if not result then return end -- not found + local matches = {} + local k = 0; + for line in string.gfind(result,'([^@]+)@') do + k = k + 1 + -- split this into the three tab-separated fields + -- _extended_ ctags format ends in ;" + local s1,s2,tag_name,file_name,tag_pattern = find(line, + '([^\t]*)\t([^\t]*)\t(.*)') + -- for Exuberant Ctags + _,_,s3 = find(tag_pattern,'(.*);\"') + if s3 then + tag_pattern = s3 + end + s1 = find(tag_pattern,'$*/$') + if s1 ~= nil then + tag_pattern = string.sub(tag_pattern,3,s1-1) + tag_pattern = string.gsub(tag_pattern,'\\/','/') + matches[k] = {tag=f,file=file_name,pattern=tag_pattern} + else + local tag_line = tonumber(tag_pattern)-1 + matches[k] = {tag=f,file=file_name,pattern=tag_line} + end + end + + if k == 0 then return end + if k > 1 then -- multiple tags found + local list = {} + for i,t in ipairs(matches) do + table.insert(list,i..' '..t.file..':'..t.pattern) + end + scite_UserListShow(list,1,function(s) + local _,_,tok = find(s,'^(%d+)') + local idx = tonumber(tok) -- very important! 
+ OpenTag(matches[idx]) + end) + else + OpenTag(matches[1]) + end +end + + diff --git a/builders/lua-inspect/extman/scite_lua/luainspect.lua b/builders/lua-inspect/extman/scite_lua/luainspect.lua new file mode 100644 index 000000000..96c3df64d --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/luainspect.lua @@ -0,0 +1,8 @@ +-- This installs LuaInspect in SciTE. + +-- If necessary, edit the following path to match your system. +local LUAINSPECT_PATH = props['ext.lua.directory'] .. '/../..' -- "c:/lua-inspect" +package.path = package.path .. ";" .. LUAINSPECT_PATH .. "/metalualib/?.lua" +package.path = package.path .. ";" .. LUAINSPECT_PATH .. "/lib/?.lua" +require "luainspect.scite" : install() + diff --git a/builders/lua-inspect/extman/scite_lua/prompt.lua b/builders/lua-inspect/extman/scite_lua/prompt.lua new file mode 100644 index 000000000..0d194a494 --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/prompt.lua @@ -0,0 +1,91 @@ + scite_Command('Last Command|do_command_list|Ctrl+Alt+P') + + local prompt = '> ' + local history_len = 4 + local prompt_len = string.len(prompt) + print 'Scite/Lua' + trace(prompt) + + function load(file) + if not file then file = props['FilePath'] end + dofile(file) + end + + function edit(file) + scite.Open(file) + end + + local sub = string.sub + local commands = {} + + local function strip_prompt(line) + if sub(line,1,prompt_len) == prompt then + line = sub(line,prompt_len+1) + end + return line + end + +-- obviously table.concat is much more efficient, but requires that the table values +-- be strings. 
+function join(tbl,delim,start,finish) + local n = table.getn(tbl) + local res = '' + -- this is a hack to work out if a table is 'list-like' or 'map-like' + local index1 = n > 0 and tbl[1] + local index2 = n > 1 and tbl[2] + if index1 and index2 then + for i,v in ipairs(tbl) do + res = res..delim..tostring(v) + end + else + for i,v in pairs(tbl) do + res = res..delim..tostring(i)..'='..tostring(v) + end + end + return string.sub(res,2) +end + +function pretty_print(...) + for i,val in ipairs(arg) do + if type(val) == 'table' then + print('{'..join(val,',',1,20)..'}') + elseif type(val) == 'string' then + print("'"..val.."'") + else + print(val) + end + end +end + + scite_OnOutputLine (function (line) + line = strip_prompt(line) + table.insert(commands,1,line) + if table.getn(commands) > history_len then + table.remove(commands,history_len+1) + end + if sub(line,1,1) == '=' then + line = 'pretty_print('..sub(line,2)..')' + end + local f,err = loadstring(line,'local') + if not f then + print(err) + else + local ok,res = pcall(f) + if ok then + if res then print('result= '..res) end + else + print(res) + end + end + trace(prompt) + return true +end) + +function insert_command(cmd) + output:AppendText(cmd) + output:GotoPos(output.Length) +end + +function do_command_list() + scite_UserListShow(commands,1,insert_command) +end diff --git a/builders/lua-inspect/extman/scite_lua/select_block.lua b/builders/lua-inspect/extman/scite_lua/select_block.lua new file mode 100644 index 000000000..61d5075e3 --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/select_block.lua @@ -0,0 +1,33 @@ +-- this experimental script makes it easy to select blocks with a single click. +-- The usual behaviour is to select the whole line, and if that line happens to be a fold line +-- then select the rest of that block. 
+ +scite_require 'bit.luax' + +function line_selected() +-- if not scite_GetProp('fold') then return end + local s1 = editor.SelectionStart + local s2 = editor.SelectionEnd + if s2 > s1 then -- non-trivial selection + local line = editor:LineFromPosition(s1) + if editor:PositionFromLine(line) > s1 then + return -- because selection didn't start at begining of line + end + if s2 == editor:PositionFromLine(line+1) then -- whole line selected! + local lev = editor.FoldLevel[line] + if bit.band(lev,SC_FOLDLEVELHEADERFLAG) then -- a fold line + local lastl = editor:GetLastChild(line,-1) + s2 = editor:PositionFromLine(lastl+1) + -- hack: a fold line beginning with a '{' is not where we want to start... + if string.find(editor:GetLine(line),'^%s*{') then + s1 = editor:PositionFromLine(line-1) + end + editor.Anchor = s2 + editor.CurrentPos = s1 + end + end + end +end + +scite_OnUpdateUI(line_selected) + diff --git a/builders/lua-inspect/extman/scite_lua/select_string.lua b/builders/lua-inspect/extman/scite_lua/select_string.lua new file mode 100644 index 000000000..4f0bf2581 --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/select_string.lua @@ -0,0 +1,25 @@ +-- this extends the usual double-click behaviour; any group of chars with the same style +-- (such as a string or a comment) will be extended. It is called immediately after the +-- default behaviour, which selects words. If a word was selected, then the cursor will +-- be at the end, and we ignore this case. + +function expand_same_style() + local pos = editor.CurrentPos + local style = editor.StyleAt[pos] + if style == 0 or not editor.Focus then return end + local p = pos + while p > -1 and editor.StyleAt[p] == style do + p = p - 1 + end + local pstart = p+1 + if pstart == pos then return end -- we're at the end! 
+ p = pos + local sz = editor.Length-1 + while p < sz and editor.StyleAt[p] == style do + p = p + 1 + end + editor:SetSel(pstart,p) +end + +scite_OnDoubleClick(expand_same_style) + diff --git a/builders/lua-inspect/extman/scite_lua/switch_buffers.lua b/builders/lua-inspect/extman/scite_lua/switch_buffers.lua new file mode 100644 index 000000000..e97d393ca --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/switch_buffers.lua @@ -0,0 +1,31 @@ +--switch_buffers.lua +--drops down a list of buffers, in recently-used order + +scite_Command 'Switch Buffer|do_buffer_list|Alt+F12' +scite_Command 'Last Buffer|last_buffer|Ctrl+F12' + +local buffers = {} + +scite_OnOpenSwitch(function(f) +--- swop the new current buffer with the last one! + local idx + for i,file in ipairs(buffers) do + if file == f then idx = i; break end + end + if idx then + table.remove(buffers,idx) + table.insert(buffers,1,f) + else + table.insert(buffers,1,f) + end +end) + +function last_buffer() + if table.getn(buffers) > 1 then + scite.Open(buffers[2]) + end +end + +function do_buffer_list() + scite_UserListShow(buffers,2,scite.Open) +end diff --git a/builders/lua-inspect/extman/scite_lua/switch_headers.lua b/builders/lua-inspect/extman/scite_lua/switch_headers.lua new file mode 100644 index 000000000..c57dafd39 --- /dev/null +++ b/builders/lua-inspect/extman/scite_lua/switch_headers.lua @@ -0,0 +1,37 @@ +-- toggles between C++ source files and corresponding header files +scite_Command('Switch Source/Header|switch_source_header|*.c|Shift+Ctrl+H') +local cpp_exts = {'cpp','cxx','c++','c'} +local hpp_exts = {'h','hpp'} + +local function within(list,val) + for i,v in list do + if val == v then return true end + end + return false +end + +local function does_exist(basename,extensions) + for i,ext in extensions do + local f = basename..'.'..ext + if scite_FileExists(f) then return f end + end + return nil +end + +function switch_source_header() + local file = props['FilePath'] + local ext = 
props['FileExt'] + local basename = props['FileDir']..'/'..props['FileName'] + if within(cpp_exts,ext) then + other = does_exist(basename,hpp_exts) + elseif within(hpp_exts,ext) then + other = does_exist(basename,cpp_exts) + else + print('not a C++ file',file); return + end + if not other then + print('source/header does not exist',file) + else + scite.Open(other) + end + end diff --git a/builders/lua-inspect/htmllib/jquery-1.4.2.min.js b/builders/lua-inspect/htmllib/jquery-1.4.2.min.js new file mode 100644 index 000000000..7c2430802 --- /dev/null +++ b/builders/lua-inspect/htmllib/jquery-1.4.2.min.js @@ -0,0 +1,154 @@ +/*! + * jQuery JavaScript Library v1.4.2 + * http://jquery.com/ + * + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * Includes Sizzle.js + * http://sizzlejs.com/ + * Copyright 2010, The Dojo Foundation + * Released under the MIT, BSD, and GPL Licenses. + * + * Date: Sat Feb 13 22:33:48 2010 -0500 + */ +(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/, +Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof 
a==="string")if((d=Ta.exec(a))&& +(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this, +a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b=== +"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this, +function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b
a"; +var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected, +parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent= +false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n= +s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var 
G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true, +applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando]; +else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this, +a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b=== +w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var 
d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i, +cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected= +c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); +a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return 
a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g, +function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split("."); +k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a), +C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B=0){a.type= +e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return 
w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&& +f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive; +if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data", +e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a, +"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return 
da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a, +d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, +e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift(); +t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D|| +g);if(j.call(y)==="[object 
Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()}, +CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m, +g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)}, 
+text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}}, +setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return hl[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h= +h[3];l=0;for(m=h.length;l=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m=== +"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g, +h){return"\\"+(h-0+1)}))}var 
z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&& +q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML=""; +if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="

";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}(); +(function(){var g=s.createElement("div");g.innerHTML="
";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}: +function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f0)for(var j=d;j0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j= +{},i;if(f&&a.length){e=0;for(var o=a.length;e-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a=== +"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode", +d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return 
c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")? +a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType=== +1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/"},F={option:[1,""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div
","
"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d= +c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this}, +wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})}, +prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b, +this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return 
this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild); +return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja, +""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]); +return this}else{e=0;for(var j=d.length;e0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["", 
+""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]===""&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e= +c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]? 
+c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja= +function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter= +Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a, +"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return 
null;if(a=e.getComputedStyle(a,null))f= +a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b= +a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=//gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!== +"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("
").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this}, +serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), +function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href, +global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&& +e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var 
e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)? +"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache=== +false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B= +false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since", 
+c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E|| +d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x); +g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status=== +1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var 
f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b=== +"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional; +if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration=== 
+"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]|| +c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start; +this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now= +this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in 
this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem, +e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b
"; +a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b); +c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a, +d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top- +f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else 
return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset": +"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in +e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window); diff --git a/builders/lua-inspect/htmllib/luainspect.css b/builders/lua-inspect/htmllib/luainspect.css new file mode 100644 index 000000000..3fccd1cf2 --- /dev/null +++ b/builders/lua-inspect/htmllib/luainspect.css @@ -0,0 +1,33 @@ +/* LuaInspect CSS styles */ + +.id { cursor: pointer } +.id.local { color: #000080; } +.id.local.param { color: #000040 } +.id.local.upvalue { color: #0000ff } +.id.local.unused { color: #ffffff; background-color: #000080 } +.id.local.ignore { color: inherit; background-color: inherit } +.id.global.known { color: #800000 } +.id.global.unknown { color: white; background-color: red } +.id.field.known { color: #600000 } +.id.field.unknown { color: #c00000 } +.id.mutatebind { font-style: italic } +.comment { color: #008000 } +.string { color: #00c000 } +.keyword { color: #505050; font-weight: bold } +.keyword.highlight { text-decoration: underline } +.masking { text-decoration: underline } +.masked { /*text-decoration: line-through*/ } +.ignore { text-decoration: inherit } +.warn { border-bottom:1px dotted #808000 } + +.id.highlight 
{background-color: #ffe0e0} +.lua-source-linenums .highlight {background-color: #e0e0e0} + + +.info { position: absolute; display: none; padding: 0.5em; background-color: #f0f0f0; border: 1px solid #808080; margin: 1.5em 0.5em } + +.lua-source { line-height: 14pt; font-size:12pt; font-size:90% } +.lua-source-linenums { float: left; } +.lua-source-content { float: left; margin-left: 1em; } +.lua-source-clear { clear: both; } +/* line-height: http://stackoverflow.com/questions/1427426/text-not-aligning-in-html-using-css */ diff --git a/builders/lua-inspect/htmllib/luainspect.js b/builders/lua-inspect/htmllib/luainspect.js new file mode 100644 index 000000000..fb1852de1 --- /dev/null +++ b/builders/lua-inspect/htmllib/luainspect.js @@ -0,0 +1,66 @@ +// LuaInspect (c) 2010 David Manura, MIT License. + +function get_line_of_domobject(obj) { + var line = $(obj).text().match(/used-line:(\d+)/); + if (line) { line = line[1]; } + return line; +} + +function get_linerange_of_objects(jobject) { + var maxlinenum; var minlinenum; + jobject.next().each(function() { + var linenum = get_line_of_domobject(this); + if (linenum) { + minlinenum = (minlinenum==null) ? linenum : Math.min(minlinenum, linenum); + maxlinenum = (maxlinenum==null) ? linenum : Math.max(maxlinenum, linenum); + } + }); + return [minlinenum, maxlinenum]; +} + +function highlight_id(aclass, enable) { + var methname = enable ? "addClass" : "removeClass"; + $("." + aclass)[methname]("highlight"); + var linenums = get_linerange_of_objects($("." 
+ aclass)); + if (linenums) { for (var i=linenums[0]; i <= linenums[1]; i++) { + $('#L'+i)[methname]("highlight"); + }} +} + +function highlightSameClass(obj, enable) { + var classes = obj.attr('class').split(' '); + for (var i in classes) { + var aclass = classes[i]; + if (aclass.match(/^id\w*\d+/)) { + highlight_id(aclass, enable); + } + } +} + +$(document).ready(function() { + $(".id").hover( + function() { + var tip = $(this).next('span'); + tip.stop(true, true).animate({opacity: "show"}, "slow"); + + highlightSameClass($(this), true); + }, + function() { + var tip = $(this).next('span'); + tip.animate({opacity: "hide"}, "fast"); + highlightSameClass($(this), false); + } + ); + $(".keyword").hover( + function() { + highlightSameClass($(this), true); + }, + function() { + highlightSameClass($(this), false); + } + ); +}); + +//.mousemove(function(kmouse) { +// $tip.css({left:kmouse.pageX+15, top:kmouse.pageY+100}); +// }) diff --git a/builders/lua-inspect/lib/luainspect/ast.lua b/builders/lua-inspect/lib/luainspect/ast.lua new file mode 100644 index 000000000..000a28910 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/ast.lua @@ -0,0 +1,929 @@ +-- luainspect.ast - Lua Abstract Syntax Tree (AST) and token list operations. +-- +-- Two main structures are maintained. A Metalua-style AST represents the +-- nested syntactic structure obtained from the parse. +-- A separate linear ordered list of tokens represents the syntactic structure +-- from the lexing, including line information (character positions only not row/columns), +-- comments, and keywords, which is originally built from the lineinfo attributes +-- injected by Metalua into the AST (IMPROVE: it probably would be simpler +-- to obtain this from the lexer directly rather then inferring it from the parsing). +-- During AST manipulations, the lineinfo maintained in the AST is ignored +-- because it was found more difficult to maintain and not in the optimal format. 
+-- +-- The contained code deals with +-- - Building the AST from source. +-- - Building the tokenlist from the AST lineinfo. +-- - Querying the AST+tokenlist. +-- - Modifying the AST+tokenlist (including incremental parsing source -> AST) +-- - Annotating the AST with navigational info (e.g. parent links) to assist queries. +-- - Dumping the tokenlist for debugging. +-- +-- (c) 2010 David Manura, MIT License. + + +--! require 'luainspect.typecheck' (context) + +-- boilerplate/utility +-- LUA_PATH="?.lua;/path/to/metalua/src/compiler/?.lua;/path/to/metalua/src/lib/?.lua" +-- import modules -- order is important +require "lexer" +require "gg" +require "mlp_lexer" +require "mlp_misc" +require "mlp_table" +require "mlp_meta" +require "mlp_expr" +require "mlp_stat" +--require "mlp_ext" +_G.mlc = {} -- make gg happy +-- Metalua:IMPROVE: make above imports simpler + +local M = {} + +--[=TESTSUITE +-- utilities +local ops = {} +ops['=='] = function(a,b) return a == b end +local function check(opname, a, b) + local op = assert(ops[opname]) + if not op(a,b) then + error("fail == " .. tostring(a) .. " " .. tostring(b)) + end +end +--]=] + +-- CATEGORY: debug +local function DEBUG(...) + if LUAINSPECT_DEBUG then + print('DEBUG:', ...) + end +end + + +-- Converts character position to row,column position in string src. +-- Add values are 1-indexed. +function M.pos_to_linecol(pos, src) + local linenum = 1 + local lasteolpos = 0 + for eolpos in src:gmatch"()\n" do + if eolpos > pos then break end + linenum = linenum + 1 + lasteolpos = eolpos + end + local colnum = pos - lasteolpos + return linenum, colnum +end + +-- Removes any sheband ("#!") line from Lua source string. +-- CATEGORY: Lua parsing +function M.remove_shebang(src) + local shebang = src:match("^#![^\r\n]*") + return shebang and (" "):rep(#shebang) .. 
src:sub(#shebang+1) or src +end + + +-- Custom version of loadstring that parses out line number info +-- CATEGORY: Lua parsing +function M.loadstring(src) + local f, err = loadstring(src, "") + if f then + return f + else + err = err:gsub('^%[string ""%]:', "") + local linenum = assert(err:match("(%d+):")) + local colnum = 0 + local linenum2 = err:match("^%d+: '[^']+' expected %(to close '[^']+' at line (%d+)") + return nil, err, linenum, colnum, linenum2 + end +end + + +-- helper for ast_from_string. Raises on error. +-- FIX? filename currently ignored in Metalua +-- CATEGORY: Lua parsing +local function ast_from_string_helper(src, filename) + filename = filename or '(string)' + local lx = mlp.lexer:newstream (src, filename) + local ast = mlp.chunk(lx) + return ast +end + + +-- Counts number of lines in text. +-- Warning: the decision of whether to count a trailing new-line in a file +-- or an empty file as a line is a little subjective. This function currently +-- defines the line count as 1 plus the number of new line characters. +-- CATEGORY: utility/string +local function linecount(text) + local n = 1 + for _ in text:gmatch'\n' do + n = n + 1 + end + return n +end + + +-- Converts Lua source string to Lua AST (via mlp/gg). +-- CATEGORY: Lua parsing +function M.ast_from_string(src, filename) + local ok, ast = pcall(ast_from_string_helper, src, filename) + if not ok then + local err = ast + err = err:match('[^\n]*') + err = err:gsub("^.-:%s*line", "line") + -- mlp.chunk prepending this is undesirable. error(msg,0) would be better in gg.lua. Reported. + -- TODO-Metalua: remove when fixed in Metalua. + local linenum, colnum = err:match("line (%d+), char (%d+)") + if not linenum then + -- Metalua libraries may return "...gg.lua:56: .../mlp_misc.lua:179: End-of-file expected" + -- without the normal line/char numbers given things like "if x then end end". Should be + -- fixed probably with gg.parse_error in _chunk in mlp_misc.lua. 
+ -- TODO-Metalua: remove when fixed in Metalua. + linenum = linecount(src) + colnum = 1 + end + local linenum2 = nil + return nil, err, linenum, colnum, linenum2 + else + return ast + end +end + + +-- Simple comment parser. Returns Metalua-style comment. +-- CATEGORY: Lua lexing +local function quick_parse_comment(src) + local s = src:match"^%-%-([^\n]*)()\n$" + if s then return {s, 1, #src, 'short'} end + local _, s = src:match(lexer.lexer.patterns.long_comment .. '\r?\n?$') + if s then return {s, 1, #src, 'long'} end + return nil +end +--FIX:check new-line correctness +--note: currently requiring \n at end of single line comment to avoid +-- incremental compilation with `--x\nf()` and removing \n from still +-- recognizing as comment `--x`. +-- currently allowing \r\n at end of long comment since Metalua includes +-- it in lineinfo of long comment (FIX:Metalua?) + + +-- Gets length of longest prefix string in both provided strings. +-- Returns max n such that text1:sub(1,n) == text2:sub(1,n) and n <= max(#text1,#text2) +-- CATEGORY: string utility +local function longest_prefix(text1, text2) + local nmin = 0 + local nmax = math.min(#text1, #text2) + while nmax > nmin do + local nmid = math.ceil((nmin+nmax)/2) + if text1:sub(1,nmid) == text2:sub(1,nmid) then + nmin = nmid + else + nmax = nmid-1 + end + end + return nmin +end + + +-- Gets length of longest postfix string in both provided strings. 
+-- Returns max n such that text1:sub(-n) == text2:sub(-n) and n <= max(#text1,#text2) +-- CATEGORY: string utility +local function longest_postfix(text1, text2) + local nmin = 0 + local nmax = math.min(#text1, #text2) + while nmax > nmin do + local nmid = math.ceil((nmin+nmax)/2) + if text1:sub(-nmid) == text2:sub(-nmid) then --[*] + nmin = nmid + else + nmax = nmid-1 + end + end + return nmin +end -- differs from longest_prefix only on line [*] + + + +-- Determines AST node that must be re-evaluated upon changing code string from +-- `src` to `bsrc`, given previous top_ast/tokenlist/src. +-- Note: decorates top_ast as side-effect. +-- If preserve is true, then does not expand AST match even if replacement is invalid. +-- CATEGORY: AST/tokenlist manipulation +function M.invalidated_code(top_ast, tokenlist, src, bsrc, preserve) + -- Converts position range in src to position range in bsrc. + local function range_transform(src_fpos, src_lpos) + local src_nlpos = #src - src_lpos + local bsrc_fpos = src_fpos + local bsrc_lpos = #bsrc - src_nlpos + return bsrc_fpos, bsrc_lpos + end + + if src == bsrc then return end -- up-to-date + + -- Find range of positions in src that differences correspond to. + -- Note: for zero byte range, src_pos2 = src_pos1 - 1. + local npre = longest_prefix(src, bsrc) + local npost = math.min(#src-npre, longest_postfix(src, bsrc)) + -- note: min avoids overlap ambiguity + local src_fpos, src_lpos = 1 + npre, #src - npost + + -- Find smallest AST node containing src range above. May also + -- be contained in (smaller) comment or whitespace. + local match_ast, match_comment, iswhitespace = + M.smallest_ast_containing_range(top_ast, tokenlist, src_fpos, src_lpos) + DEBUG('invalidate-smallest:', match_ast and (match_ast.tag or 'notag'), match_comment, iswhitespace) + + -- Determine which (ast, comment, or whitespace) to match, and get its pos range in src and bsrc. 
+ local srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, mast, mtype + if iswhitespace then + mast, mtype = nil, 'whitespace' + srcm_fpos, srcm_lpos = src_fpos, src_lpos + elseif match_comment then + mast, mtype = match_comment, 'comment' + srcm_fpos, srcm_lpos = match_comment.fpos, match_comment.lpos + else + mast, mtype = match_ast, 'ast' + repeat + srcm_fpos, srcm_lpos = M.ast_pos_range(mast, tokenlist) + if not srcm_fpos then + if mast == top_ast then + srcm_fpos, srcm_lpos = 1, #src + break + else + M.ensure_parents_marked(top_ast) + mast = mast.parent + end + end + until srcm_fpos + end + bsrcm_fpos, bsrcm_lpos = range_transform(srcm_fpos, srcm_lpos) + + -- Never expand match if preserve specified. + if preserve then + return srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, mast, mtype + end + + -- Determine if replacement could break parent nodes. + local isreplacesafe + if mtype == 'whitespace' then + if bsrc:sub(bsrcm_fpos, bsrcm_lpos):match'^%s*$' then -- replaced with whitespace + if bsrc:sub(bsrcm_fpos-1, bsrcm_lpos+1):match'%s' then -- not eliminating whitespace + isreplacesafe = true + end + end + elseif mtype == 'comment' then + local m2src = bsrc:sub(bsrcm_fpos, bsrcm_lpos) + DEBUG('invalidate-comment[' .. m2src .. ']') + if quick_parse_comment(m2src) then -- replaced with comment + isreplacesafe = true + end + end + if isreplacesafe then -- return on safe replacement + return srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, mast, mtype + end + + -- Find smallest containing statement block that will compile (or top_ast). 
+ while 1 do + match_ast = M.get_containing_statementblock(match_ast, top_ast) + if match_ast == top_ast then + return 1,#src, 1, #bsrc, match_ast, 'statblock' + -- entire AST invalidated + end + local srcm_fpos, srcm_lpos = M.ast_pos_range(match_ast, tokenlist) + local bsrcm_fpos, bsrcm_lpos = range_transform(srcm_fpos, srcm_lpos) + local msrc = bsrc:sub(bsrcm_fpos, bsrcm_lpos) + DEBUG('invalidate-statblock:', match_ast and match_ast.tag, '[' .. msrc .. ']') + if loadstring(msrc) then -- compiled + return srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, match_ast, 'statblock' + end + M.ensure_parents_marked(top_ast) + match_ast = match_ast.parent + end +end + + +-- Walks AST `ast` in arbitrary order, visiting each node `n`, executing `fdown(n)` (if specified) +-- when going down and `fup(n)` (if specified) when going up. +-- CATEGORY: AST walk +function M.walk(ast, fdown, fup) + assert(type(ast) == 'table') + if fdown then fdown(ast) end + for _,bast in ipairs(ast) do + if type(bast) == 'table' then + M.walk(bast, fdown, fup) + end + end + if fup then fup(ast) end +end + + +-- Replaces contents of table t1 with contents of table t2. +-- Does not change metatable (if any). +-- This function is useful for swapping one AST node with another +-- while preserving any references to the node. +-- CATEGORY: table utility +function M.switchtable(t1, t2) + for k in pairs(t1) do t1[k] = nil end + for k in pairs(t2) do t1[k] = t2[k] end +end + + +-- Inserts all elements in list bt at index i in list t. 
+-- CATEGORY: table utility +local function tinsertlist(t, i, bt) + local oldtlen, delta = #t, i - 1 + for ti = #t + 1, #t + #bt do t[ti] = false end -- preallocate (avoid holes) + for ti = oldtlen, i, -1 do t[ti + #bt] = t[ti] end -- shift + for bi = 1, #bt do t[bi + delta] = bt[bi] end -- fill +end +--[=[TESTSUITE: +local function _tinsertlist(t, i, bt) + for bi=#bt,1,-1 do table.insert(t, i, bt[bi]) end +end -- equivalent but MUCH less efficient for large tables +local function _tinsertlist(t, i, bt) + for bi=1,#bt do table.insert(t, i+bi-1, bt[bi]) end +end -- equivalent but MUCH less efficient for large tables +local t = {}; tinsertlist(t, 1, {}); assert(table.concat(t)=='') +local t = {}; tinsertlist(t, 1, {2,3}); assert(table.concat(t)=='23') +local t = {4}; tinsertlist(t, 1, {2,3}); assert(table.concat(t)=='234') +local t = {2}; tinsertlist(t, 2, {3,4}); assert(table.concat(t)=='234') +local t = {4,5}; tinsertlist(t, 1, {2,3}); assert(table.concat(t)=='2345') +local t = {2,5}; tinsertlist(t, 2, {3,4}); assert(table.concat(t)=='2345') +local t = {2,3}; tinsertlist(t, 3, {4,5}); assert(table.concat(t)=='2345') +print 'DONE' +--]=] + + + +-- Gets list of keyword positions related to node ast in source src +-- note: ast must be visible, i.e. have lineinfo (e.g. unlike `Id "self" definition). +-- Note: includes operators. +-- Note: Assumes ast Metalua-style lineinfo is valid. +-- CATEGORY: tokenlist build +function M.get_keywords(ast, src) + local list = {} + if not ast.lineinfo then return list end + -- examine space between each pair of children i and j. + -- special cases: 0 is before first child and #ast+1 is after last child + + -- Put children in lexical order. + -- Some binary operations have arguments reversed from lexical order. 
+ -- For example, `a > b` becomes `Op{'lt', `Id 'b', `Id 'a'} + local oast = + (ast.tag == 'Op' and #ast == 3 and ast[2].lineinfo.first[3] > ast[3].lineinfo.first[3]) + and {ast[1], ast[3], ast[2]} or ast + + local i = 0 + while i <= #ast do + -- j is node following i that has lineinfo + local j = i+1; while j < #ast+1 and not oast[j].lineinfo do j=j+1 end + + -- Get position range [fpos,lpos] between subsequent children. + local fpos + if i == 0 then -- before first child + fpos = ast.lineinfo.first[3] + else + local last = oast[i].lineinfo.last; local c = last.comments + fpos = (c and #c > 0 and c[#c][3] or last[3]) + 1 + end + local lpos + if j == #ast+1 then -- after last child + lpos = ast.lineinfo.last[3] + else + local first = oast[j].lineinfo.first; local c = first.comments + --DEBUG('first', ast.tag, first[3], src:sub(first[3], first[3]+3)) + lpos = (c and #c > 0 and c[1][2] or first[3]) - 1 + end + + -- Find keyword in range. + local spos = fpos + repeat + local mfpos, tok, mlppos = src:match("^%s*()(%a+)()", spos) + if not mfpos then + mfpos, tok, mlppos = src:match("^%s*()(%p+)()", spos) + end + if mfpos then + local mlpos = mlppos-1 + if mlpos > lpos then mlpos = lpos end + --DEBUG('look', ast.tag, #ast,i,j,'*', mfpos, tok, mlppos, fpos, lpos, src:sub(fpos, fpos+5)) + if mlpos >= mfpos then + list[#list+1] = mfpos + list[#list+1] = mlpos + end + end + spos = mlppos + until not spos or spos > lpos + -- note: finds single keyword. in `local function` returns only `local` + --DEBUG(i,j ,'test[' .. src:sub(fpos, lpos) .. ']') + + i = j -- next + + --DESIGN:Lua: comment: string.match accepts a start position but not a stop position + end + return list +end +-- Q:Metalua: does ast.lineinfo[loc].comments imply #ast.lineinfo[loc].comments > 0 ? + + + +-- Generates ordered list of tokens in top_ast/src. +-- Note: currently ignores operators and parens. +-- Note: Modifies ast. +-- Note: Assumes ast Metalua-style lineinfo is valid. 
+-- CATEGORY: AST/tokenlist query +local isterminal = {Nil=true, Dots=true, True=true, False=true, Number=true, String=true, + Dots=true, Id=true} +local function compare_tokens_(atoken, btoken) return atoken.fpos < btoken.fpos end +function M.ast_to_tokenlist(top_ast, src) + local tokens = {} -- {nbytes=#src} + local isseen = {} + M.walk(top_ast, function(ast) + if isterminal[ast.tag] then -- Extract terminal + local token = ast + if ast.lineinfo then + token.fpos, token.lpos, token.ast = ast.lineinfo.first[3], ast.lineinfo.last[3], ast + table.insert(tokens, token) + end + else -- Extract non-terminal + local keywordposlist = M.get_keywords(ast, src) + for i=1,#keywordposlist,2 do + local fpos, lpos = keywordposlist[i], keywordposlist[i+1] + local toksrc = src:sub(fpos, lpos) + local token = {tag='Keyword', fpos=fpos, lpos=lpos, ast=ast, toksrc} + table.insert(tokens, token) + end + end + -- Extract comments + for i=1,2 do + local comments = ast.lineinfo and ast.lineinfo[i==1 and 'first' or 'last'].comments + if comments then for _, comment in ipairs(comments) do + if not isseen[comment] then + comment.tag = 'Comment' + local token = comment + token.fpos, token.lpos, token.ast = comment[2], comment[3], comment + table.insert(tokens, token) + isseen[comment] = true + end + end end + end + end, nil) + table.sort(tokens, compare_tokens_) + return tokens +end + + +-- Gets tokenlist range [fidx,lidx] covered by ast. Returns nil,nil if not found. +--FIX:PERFORMANCE:this is slow on large files. +-- CATEGORY: AST/tokenlist query +function M.ast_idx_range_in_tokenlist(tokenlist, ast) + -- Get list of primary nodes under ast. + local isold = {}; M.walk(ast, function(ast) isold[ast] = true end) + -- Get range. 
+ local fidx, lidx + for idx=1,#tokenlist do + local token = tokenlist[idx] + if isold[token.ast] then + lidx = idx + if not fidx then fidx = idx end + end + end + return fidx, lidx +end + + +-- Gets index range in tokenlist overlapped by character position range [fpos, lpos]. +-- For example, `do ff() end` with range ` ff() ` would match tokens `ff()`. +-- Tokens partly inside range are counted, so range `f()` would match tokens `ff()`. +-- If lidx = fidx - 1, then position range is whitespace between tokens lidx (on left) +-- and fidx (on right), and this may include token pseudoindices 0 (start of file) and +-- #tokenlist+1 (end of file). +-- Note: lpos == fpos - 1 indicates zero-width range between chars lpos and fpos. +-- CATEGORY: tokenlist query +function M.tokenlist_idx_range_over_pos_range(tokenlist, fpos, lpos) + -- Find first/last indices of tokens overlapped (even partly) by position range. + local fidx, lidx + for idx=1,#tokenlist do + local token = tokenlist[idx] + --if (token.fpos >= fpos and token.fpos <= lpos) or (token.lpos >= fpos and token.lpos <= lpos) then -- token overlaps range + if fpos <= token.lpos and lpos >= token.fpos then -- range overlaps token (even partially) + if not fidx then fidx = idx end + lidx = idx + end + end + if not fidx then -- on fail, check between tokens + for idx=1,#tokenlist+1 do -- between idx-1 and idx + local tokfpos, toklpos = tokenlist[idx-1] and tokenlist[idx-1].lpos, tokenlist[idx] and tokenlist[idx].fpos + if (not tokfpos or fpos > tokfpos) and (not toklpos or lpos < toklpos) then -- range between tokens + return idx, idx-1 + end + end + end + return fidx, lidx +end +--[=[TESTSUITE +local function test(...) 
+ return table.concat({M.tokenlist_idx_range_over_pos_range(...)}, ',') +end +check('==', test({}, 2, 2), "1,0") -- no tokens +check('==', test({{tag='Id', fpos=1, lpos=1}}, 2, 2), "2,1") -- right of one token +check('==', test({{tag='Id', fpos=3, lpos=3}}, 2, 2), "1,0") -- left of one token +check('==', test({{tag='Id', fpos=3, lpos=4}}, 2, 3), "1,1") -- left partial overlap one token +check('==', test({{tag='Id', fpos=3, lpos=4}}, 4, 5), "1,1") -- right partial overlap one token +check('==', test({{tag='Id', fpos=3, lpos=6}}, 4, 5), "1,1") -- partial inner overlap one token +check('==', test({{tag='Id', fpos=3, lpos=6}}, 3, 6), "1,1") -- exact overlap one token +check('==', test({{tag='Id', fpos=4, lpos=5}}, 3, 6), "1,1") -- extra overlap one token +check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=5, lpos=6}}, 4, 4), "2,1") -- between tokens, " " exact +check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=5, lpos=6}}, 4, 3), "2,1") -- between tokens, "" on left +check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=5, lpos=6}}, 5, 4), "2,1") -- between tokens, "" on right +check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=4, lpos=5}}, 4, 3), "2,1") -- between tokens, "" exact +--]=] + +-- Removes tokens in tokenlist covered by ast. +-- CATEGORY: tokenlist manipulation +local function remove_ast_in_tokenlist(tokenlist, ast) + local fidx, lidx = M.ast_idx_range_in_tokenlist(tokenlist, ast) + if fidx then -- note: fidx implies lidx + for idx=lidx,fidx,-1 do table.remove(tokenlist, idx) end + end +end + + +-- Inserts tokens from btokenlist into tokenlist. Preserves sort. +-- CATEGORY: tokenlist manipulation +local function insert_tokenlist(tokenlist, btokenlist) + local ftoken = btokenlist[1] + if ftoken then + -- Get index in tokenlist in which to insert tokens in btokenlist. 
+ local fidx + for idx=1,#tokenlist do + if tokenlist[idx].fpos > ftoken.fpos then fidx = idx; break end + end + fidx = fidx or #tokenlist + 1 -- else append + + -- Insert tokens. + tinsertlist(tokenlist, fidx, btokenlist) + end +end + + +-- Get character position range covered by ast in tokenlist. Returns nil,nil on not found. +-- CATEGORY: AST/tokenlist query +function M.ast_pos_range(ast, tokenlist) -- IMPROVE:style: ast_idx_range_in_tokenlist has params reversed + local fidx, lidx = M.ast_idx_range_in_tokenlist(tokenlist, ast) + if fidx then + return tokenlist[fidx].fpos, tokenlist[lidx].lpos + else + return nil, nil + end +end + + +-- Gets string representation of AST node. nil if none. +-- IMPROVE: what if node is empty block? +-- CATEGORY: AST/tokenlist query +function M.ast_to_text(ast, tokenlist, src) -- IMPROVE:style: ast_idx_range_in_tokenlist has params reversed + local fpos, lpos = M.ast_pos_range(ast, tokenlist) + if fpos then + return src:sub(fpos, lpos) + else + return nil + end +end + + + +-- Gets smallest AST node in top_ast/tokenlist/src +-- completely containing position range [pos1, pos2]. +-- careful: "function" is not part of the `Function node. +-- If range is inside comment, returns comment also. +-- If range is inside whitespace, then returns true in third return value. +-- CATEGORY: AST/tokenlist query +function M.smallest_ast_containing_range(top_ast, tokenlist, pos1, pos2) + local f0idx, l0idx = M.tokenlist_idx_range_over_pos_range(tokenlist, pos1, pos2) + + -- Find enclosing AST. 
+ M.ensure_parents_marked(top_ast) + local fidx, lidx = f0idx, l0idx + while tokenlist[fidx] and not tokenlist[fidx].ast.parent do fidx = fidx - 1 end + while tokenlist[lidx] and not tokenlist[lidx].ast.parent do lidx = lidx + 1 end + -- DEBUG(fidx, lidx, f0idx, l0idx, #tokenlist, pos1, pos2, tokenlist[fidx], tokenlist[lidx]) + local ast = not (tokenlist[fidx] and tokenlist[lidx]) and top_ast or + M.common_ast_parent(tokenlist[fidx].ast, tokenlist[lidx].ast, top_ast) + -- DEBUG('m2', tokenlist[fidx], tokenlist[lidx], top_ast, ast, ast and ast.tag) + if l0idx == f0idx - 1 then -- whitespace + return ast, nil, true + elseif l0idx == f0idx and tokenlist[l0idx].tag == 'Comment' then + return ast, tokenlist[l0idx], nil + else + return ast, nil, nil + end +end +--IMPROVE: handle string edits and maybe others + + +-- Gets smallest statement block containing position pos or +-- nearest statement block before pos, whichever is smaller, given ast/tokenlist. +function M.current_statementblock(ast, tokenlist, pos) + local fidx,lidx = M.tokenlist_idx_range_over_pos_range(tokenlist, pos, pos) + if fidx > lidx then fidx = lidx end -- use nearest backward + + -- Find closest AST node backward + while fidx >= 1 and tokenlist[fidx].tag == 'Comment' do fidx=fidx-1 end + + if fidx < 1 then return ast, false end + local mast = tokenlist[fidx].ast + if not mast then return ast, false end + mast = M.get_containing_statementblock(mast, ast) + local isafter = false + if mast.tag2 ~= 'Block' then + local mfidx,mlidx = M.ast_idx_range_in_tokenlist(tokenlist, mast) + if pos > mlidx then + isafter = true + end + end + + return mast, isafter +end + +-- Gets index of bast in ast (nil if not found). +-- CATEGORY: AST query +function M.ast_idx(ast, bast) + for idx=1,#ast do + if ast[idx] == bast then return idx end + end + return nil +end + + +-- Gets parent of ast and index of ast in parent. +-- Root node top_ast must also be provided. Returns nil, nil if ast is root. 
+-- Note: may call mark_parents. +-- CATEGORY: AST query +function M.ast_parent_idx(top_ast, ast) + if ast == top_ast then return nil, nil end + M.ensure_parents_marked(top_ast); assert(ast.parent) + local idx = M.ast_idx(ast.parent, ast) + return ast.parent, idx +end + + +-- Gets common parent of aast and bast. Always returns value. +-- Must provide root top_ast too. +-- CATEGORY: AST query +function M.common_ast_parent(aast, bast, top_ast) + M.ensure_parents_marked(top_ast) + local isparent = {} + local tast = bast; repeat isparent[tast] = true; tast = tast.parent until not tast + local uast = aast; repeat if isparent[uast] then return uast end; uast = uast.parent until not uast + assert(false) +end + + +-- Replaces old_ast with new_ast/new_tokenlist in top_ast/tokenlist. +-- Note: assumes new_ast is a block. assumes old_ast is a statement or block. +-- CATEGORY: AST/tokenlist +function M.replace_statements(top_ast, tokenlist, old_ast, new_ast, new_tokenlist) + remove_ast_in_tokenlist(tokenlist, old_ast) + insert_tokenlist(tokenlist, new_tokenlist) + if old_ast == top_ast then -- special case: no parent + M.switchtable(old_ast, new_ast) -- note: safe since block is not in tokenlist. + else + local parent_ast, idx = M.ast_parent_idx(top_ast, old_ast) + table.remove(parent_ast, idx) + tinsertlist(parent_ast, idx, new_ast) + end + + -- fixup annotations + for _,bast in ipairs(new_ast) do + if top_ast.tag2 then M.mark_tag2(bast, bast.tag == 'Do' and 'StatBlock' or 'Block') end + if old_ast.parent then M.mark_parents(bast, old_ast.parent) end + end +end + + +-- Adjusts lineinfo in tokenlist. +-- All char positions starting at pos1 are shifted by delta number of chars. 
+-- CATEGORY: tokenlist +function M.adjust_lineinfo(tokenlist, pos1, delta) + for _,token in ipairs(tokenlist) do + if token.fpos >= pos1 then + token.fpos = token.fpos + delta + end + if token.lpos >= pos1 then + token.lpos = token.lpos + delta + end + end + --tokenlist.nbytes = tokenlist.nbytes + delta +end + + +-- For each node n in ast, sets n.parent to parent node of n. +-- Assumes ast.parent will be parent_ast (may be nil) +-- CATEGORY: AST query +function M.mark_parents(ast, parent_ast) + ast.parent = parent_ast + for _,ast2 in ipairs(ast) do + if type(ast2) == 'table' then + M.mark_parents(ast2, ast) + end + end +end + + +-- Calls mark_parents(ast) if ast not marked. +-- CATEGORY: AST query +function M.ensure_parents_marked(ast) + if ast[1] and not ast[1].parent then M.mark_parents(ast) end +end + + +-- For each node n in ast, sets n.tag2 to context string: +-- 'Block' - node is block +-- 'Stat' - node is statement +-- 'StatBlock' - node is statement and block (i.e. `Do) +-- 'Exp' - node is expression +-- 'Explist' - node is expression list (or identifier list) +-- 'Pair' - node is key-value pair in table constructor +-- note: ast.tag2 will be set to context. 
+-- CATEGORY: AST query +local iscertainstat = {Do=true, Set=true, While=true, Repeat=true, If=true, + Fornum=true, Forin=true, Local=true, Localrec=true, Return=true, Break=true} +function M.mark_tag2(ast, context) + context = context or 'Block' + ast.tag2 = context + for i,bast in ipairs(ast) do + if type(bast) == 'table' then + local nextcontext + if bast.tag == 'Do' then + nextcontext = 'StatBlock' + elseif iscertainstat[bast.tag] then + nextcontext = 'Stat' + elseif bast.tag == 'Call' or bast.tag == 'Invoke' then + nextcontext = context == 'Block' and 'Stat' or 'Exp' + --DESIGN:Metalua: these calls actually contain expression lists, + -- but the expression list is not represented as a complete node + -- by Metalua (as blocks are in `Do statements) + elseif bast.tag == 'Pair' then + nextcontext = 'Pair' + elseif not bast.tag then + if ast.tag == 'Set' or ast.tag == 'Local' or ast.tag == 'Localrec' + or ast.tag == 'Forin' and i <= 2 + or ast.tag == 'Function' and i == 1 + then + nextcontext = 'Explist' + else + nextcontext = 'Block' + end + else + nextcontext = 'Exp' + end + M.mark_tag2(bast, nextcontext) + end + end +end + + +-- Gets smallest statement or block containing or being `ast`. +-- The AST root node `top_ast` must also be provided. +-- Note: may decorate AST as side-effect (mark_tag2/mark_parents). +-- top_ast is assumed a block, so this is always successful. +-- CATEGORY: AST query +function M.get_containing_statementblock(ast, top_ast) + if not top_ast.tag2 then M.mark_tag2(top_ast) end + if ast.tag2 == 'Stat' or ast.tag2 == 'StatBlock' or ast.tag2 == 'Block' then + return ast + else + M.ensure_parents_marked(top_ast) + return M.get_containing_statementblock(ast.parent, top_ast) + end +end + + +-- Finds smallest statement, block, or comment AST in ast/tokenlist containing position +-- range [fpos, lpos]. 
If allowexpand is true (default nil) and located AST +-- coincides with position range, then next containing statement is used +-- instead (this allows multiple calls to further expand the statement selection). +-- CATEGORY: AST query +function M.select_statementblockcomment(ast, tokenlist, fpos, lpos, allowexpand) +--IMPROVE: rename ast to top_ast + local match_ast, comment_ast = M.smallest_ast_containing_range(ast, tokenlist, fpos, lpos) + local select_ast = comment_ast or M.get_containing_statementblock(match_ast, ast) + local nfpos, nlpos = M.ast_pos_range(select_ast, tokenlist) + --DEBUG('s', nfpos, nlpos, fpos, lpos, match_ast.tag, select_ast.tag) + if allowexpand and fpos == nfpos and lpos == nlpos then + if comment_ast then + -- Select enclosing statement. + select_ast = match_ast + nfpos, nlpos = M.ast_pos_range(select_ast, tokenlist) + else + -- note: multiple times may be needed to expand selection. For example, in + -- `for x=1,2 do f() end` both the statement `f()` and block `f()` have + -- the same position range. + M.ensure_parents_marked(ast) + while select_ast.parent and fpos == nfpos and lpos == nlpos do + select_ast = M.get_containing_statementblock(select_ast.parent, ast) + nfpos, nlpos = M.ast_pos_range(select_ast, tokenlist) + end + end + end + return nfpos, nlpos +end + + +-- Converts tokenlist to string representation for debugging. +-- CATEGORY: tokenlist debug +function M.dump_tokenlist(tokenlist) + local ts = {} + for i,token in ipairs(tokenlist) do + ts[#ts+1] = 'tok.' .. i .. ': [' .. token.fpos .. ',' .. token.lpos .. '] ' + .. tostring(token[1]) .. ' ' .. tostring(token.ast.tag) + end + return table.concat(ts, '\n') -- .. 'nbytes=' .. tokenlist.nbytes .. '\n' +end + + +--FIX:Q: does this handle Unicode ok? + +--FIX?:Metalua: fails on string with escape sequence '\/'. The Reference Manual +-- doesn't say this sequence is valid though. + +--FIX:Metalua: In `local --[[x]] function --[[y]] f() end`, +-- 'x' comment omitted from AST. 
+ +--FIX:Metalua: `do --[[x]] end` doesn't generate comments in AST. +-- `if x then --[[x]] end` and `while 1 do --[[x]] end` generates +-- comments in first/last of block + +--FIX:Metalua: `--[[x]] f() --[[y]]` returns lineinfo around `f()`. +-- `--[[x]] --[[y]]` returns lineinfo around everything. + +--FIX:Metalua: `while 1 do --[[x]] --[[y]] end` returns first > last +-- lineinfo for contained block + +--FIX:Metalua: search for "PATCHED:LuaInspect" in the metalualib folder. + +--FIX?:Metalua: loadstring parses "--x" but metalua omits the comment in the AST + +--FIX?:Metalua: `local x` is generating `Local{{`Id{x}}, {}}`, which +-- has no lineinfo on {}. This is contrary to the Metalua +-- spec: `Local{ {ident+} {expr+}? }. +-- Other things like `self` also generate no lineinfo. +-- The ast2.lineinfo above avoids this. + +--FIX:Metalua: Metalua shouldn't overwrite ipairs/pairs. Note: Metalua version +-- doesn't set errorlevel correctly. + +--Q:Metalua: Why does `return --[[y]] z --[[x]]` have +-- lineinfo.first.comments, lineinfo.last.comments, +-- plus lineinfo.comments (which is the same as lineinfo.first.comments) ? + +--CAUTION:Metalua: `do f() end` returns lineinfo around `do f() end`, while +-- `while 1 do f() end` returns lineinfo around `f()` for inner block. + +--CAUTION:Metalua: The lineinfo on Metalua comments is inconsistent with other +-- nodes + +--CAUTION:Metalua: lineinfo of table in `f{}` is [3,2], of `f{ x,y }` it's [4,6]. +-- This is inconsistent with `x={}` which is [3,4] and `f""` which is [1,2] +-- for the string. + +--CAUTION:Metalua: only the `function()` form of `Function includes `function` +-- in lineinfo. 'function' is part of `Localrec and `Set in syntactic sugar form. 
+ + +--[=[TESTSUITE +-- test longest_prefix/longest_postfix +local function pr(text1, text2) + local lastv + local function same(v) + assert(not lastv or v == lastv); lastv = v; return v + end + local function test1(text1, text2) -- test prefix/postfix + same(longest_prefix(text1, text2)) + same(longest_postfix(text1:reverse(), text2:reverse())) + end + local function test2(text1, text2) -- test swap + test1(text1, text2) + test1(text2, text1) + end + for _,extra in ipairs{"", "x", "xy", "xyz"} do -- test extra chars + test2(text1, text2..extra) + test2(text2, text1..extra) + end + return lastv +end +check('==', pr("",""), 0) +check('==', pr("a",""), 0) +check('==', pr("a","a"), 1) +check('==', pr("ab",""), 0) +check('==', pr("ab","a"), 1) +check('==', pr("ab","ab"), 2) +check('==', pr("abcdefg","abcdefgh"), 7) +--]=] + +--[=[TESTSUITE +print 'DONE' +--]=] + + +return M diff --git a/builders/lua-inspect/lib/luainspect/command.lua b/builders/lua-inspect/lib/luainspect/command.lua new file mode 100755 index 000000000..202e6258a --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/command.lua @@ -0,0 +1,85 @@ +#!/usr/bin/env lua + +-- luainspect.command - LuaInspect command-line interface. +-- This file can be invoked from the command line + +package.path = package.path .. ';metalualib/?.lua' +package.path = package.path .. ';lib/?.lua' + + +local LA = require "luainspect.ast" +local LI = require "luainspect.init" + +local function loadfile(filename) + local fh = assert(io.open(filename, 'r')) + local data = fh:read'*a' + fh:close() + return data +end + +local function writefile(filename, output) + local fh = assert(io.open(filename, 'wb')) + fh:write(output) + fh:close() +end + +local function fail(err) + io.stderr:write(err, '\n') + os.exit(1) +end + +-- Warning/status reporting function. 
+-- CATEGORY: reporting + AST +local function report(s) io.stderr:write(s, "\n") end + +-- parse flags +local function getopt(c) + if arg[1] then + local x = arg[1]:match('^%-'..c..'(.*)') + if x then table.remove(arg, 1) + if x == '' and arg[1] then x = arg[1]; table.remove(arg, 1) end + return x + end + end +end +local fmt = getopt 'f' or 'delimited' +local ast_to_text = + (fmt == 'delimited') and require 'luainspect.delimited'.ast_to_delimited or + (fmt == 'html') and require 'luainspect.html'.ast_to_html or + fail('invalid format specified, -f'..fmt) +local libpath = getopt 'l' or '.' +local outpath = getopt 'o' or '-' + +local path = unpack(arg) +if not path then + fail[[ +inspect.lua [options] + -f {delimited|html} - output format + -l path path to library sources (e.g. luainspect.css/js), for html only + -o path output path (defaults to standard output (-) +]] +end + +local src = loadfile(path) +local ast, err, linenum, colnum, linenum2 = LA.ast_from_string(src, path) + +--require "metalua.table2"; table.print(ast, 'hash', 50) +if ast then + local tokenlist = LA.ast_to_tokenlist(ast, src) + LI.inspect(ast, tokenlist, src, report) + LI.mark_related_keywords(ast, tokenlist, src) + + local output = ast_to_text(ast, src, tokenlist, {libpath=libpath}) + + if outpath == '-' then + io.stdout:write(output) + else + writefile(outpath, output) + end +else + io.stderr:write("syntax error: ", err) + os.exit(1) +end + + + diff --git a/builders/lua-inspect/lib/luainspect/compat_env.lua b/builders/lua-inspect/lib/luainspect/compat_env.lua new file mode 100644 index 000000000..326b3b4c4 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/compat_env.lua @@ -0,0 +1,390 @@ +--[[ + + compat_env v$(_VERSION) - Lua 5.1/5.2 environment compatibility functions + +SYNOPSIS + + -- Get load/loadfile compatibility functions only if using 5.1. 
+ local CL = pcall(load, '') and _G or require 'compat_env' + local load = CL.load + local loadfile = CL.loadfile + + -- The following now works in both Lua 5.1 and 5.2: + assert(load('return 2*pi', nil, 't', {pi=math.pi}))() + assert(loadfile('ex.lua', 't', {print=print}))() + + -- Get getfenv/setfenv compatibility functions only if using 5.2. + local getfenv = _G.getfenv or require 'compat_env'.getfenv + local setfenv = _G.setfenv or require 'compat_env'.setfenv + local function f() return x end + setfenv(f, {x=2}) + print(x, getfenv(f).x) --> 2, 2 + +DESCRIPTION + + This module provides Lua 5.1/5.2 environment related compatibility functions. + This includes implementations of Lua 5.2 style `load` and `loadfile` + for use in Lua 5.1. It also includes Lua 5.1 style `getfenv` and `setfenv` + for use in Lua 5.2. + +API + + local CL = require 'compat_env' + + CL.load (ld [, source [, mode [, env] ] ]) --> f [, err] + + This behaves the same as the Lua 5.2 `load` in both + Lua 5.1 and 5.2. + http://www.lua.org/manual/5.2/manual.html#pdf-load + + CL.loadfile ([filename [, mode [, env] ] ]) --> f [, err] + + This behaves the same as the Lua 5.2 `loadfile` in both + Lua 5.1 and 5.2. + http://www.lua.org/manual/5.2/manual.html#pdf-loadfile + + CL.getfenv ([f]) --> t + + This is identical to the Lua 5.1 `getfenv` in Lua 5.1. + This behaves similar to the Lua 5.1 `getfenv` in Lua 5.2. + When a global environment is to be returned, or when `f` is a + C function, this returns `_G` since Lua 5.2 doesn't have + (thread) global and C function environments. This will also + return `_G` if the Lua function `f` lacks an `_ENV` + upvalue, but it will raise an error if uncertain due to lack of + debug info. It is not normally considered good design to use + this function; when possible, use `load` or `loadfile` instead. + http://www.lua.org/manual/5.1/manual.html#pdf-getfenv + + CL.setfenv (f, t) + + This is identical to the Lua 5.1 `setfenv` in Lua 5.1. 
+ This behaves similar to the Lua 5.1 `setfenv` in Lua 5.2. + This will do nothing if `f` is a Lua function that + lacks an `_ENV` upvalue, but it will raise an error if uncertain + due to lack of debug info. See also Design Notes below. + It is not normally considered good design to use + this function; when possible, use `load` or `loadfile` instead. + http://www.lua.org/manual/5.1/manual.html#pdf-setfenv + +DESIGN NOTES + + This module intends to provide robust and fairly complete reimplementations + of the environment related Lua 5.1 and Lua 5.2 functions. + No effort is made, however, to simulate rare or difficult to simulate features, + such as thread environments, although this is liable to change in the future. + Such 5.1 capabilities are discouraged and ideally + removed from 5.1 code, thereby allowing your code to work in both 5.1 and 5.2. + + In Lua 5.2, a `setfenv(f, {})`, where `f` lacks any upvalues, will be silently + ignored since there is no `_ENV` in this function to write to, and the + environment will have no effect inside the function anyway. However, + this does mean that `getfenv(setfenv(f, t))` does not necessarily equal `t`, + which is incompatible with 5.1 code (a possible workaround would be [1]). + If `setfenv(f, {})` has an upvalue but no debug info, then this will raise + an error to prevent inadvertently executing potentially untrusted code in the + global environment. + + It is not normally considered good design to use `setfenv` and `getfenv` + (one reason they were removed in 5.2). When possible, consider replacing + these with `load` or `loadfile`, which are more restrictive and have native + implementations in 5.2. + + This module might be merged into a more general Lua 5.1/5.2 compatibility + library (e.g. a full reimplementation of Lua 5.2 `_G`). However, + `load/loadfile/getfenv/setfenv` perhaps are among the more cumbersome + functions not to have. 
+ +INSTALLATION + + Download compat_env.lua: + + wget https://raw.github.com/gist/1654007/compat_env.lua + + Copy compat_env.lua into your LUA_PATH. + + Alternately, unpack, test, and install into LuaRocks: + + wget https://raw.github.com/gist/1422205/sourceunpack.lua + lua sourceunpack.lua compat_env.lua + (cd out && luarocks make) + +Related work + + http://lua-users.org/wiki/LuaVersionCompatibility + https://github.com/stevedonovan/Penlight/blob/master/lua/pl/utils.lua + - penlight implementations of getfenv/setfenv + http://lua-users.org/lists/lua-l/2010-06/msg00313.html + - initial getfenv/setfenv implementation + +References + + [1] http://lua-users.org/lists/lua-l/2010-06/msg00315.html + +Copyright + +(c) 2012 David Manura. Licensed under the same terms as Lua 5.1/5.2 (MIT license). + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ +--]]--------------------------------------------------------------------- + +local M = {_TYPE='module', _NAME='compat_env', _VERSION='0.2.20120124'} + +local function check_chunk_type(s, mode) + local nmode = mode or 'bt' + local is_binary = s and #s > 0 and s:byte(1) == 27 + if is_binary and not nmode:match'b' then + return nil, ("attempt to load a binary chunk (mode is '%s')"):format(mode) + elseif not is_binary and not nmode:match't' then + return nil, ("attempt to load a text chunk (mode is '%s')"):format(mode) + end + return true +end + +local IS_52_LOAD = pcall(load, '') +if IS_52_LOAD then + M.load = _G.load + M.loadfile = _G.loadfile +else + -- 5.2 style `load` implemented in 5.1 + function M.load(ld, source, mode, env) + local f + if type(ld) == 'string' then + local s = ld + local ok, err = check_chunk_type(s, mode); if not ok then return ok, err end + local err; f, err = loadstring(s, source); if not f then return f, err end + elseif type(ld) == 'function' then + local ld2 = ld + if (mode or 'bt') ~= 'bt' then + local first = ld() + local ok, err = check_chunk_type(first, mode); if not ok then return ok, err end + ld2 = function() + if first then + local chunk=first; first=nil; return chunk + else return ld() end + end + end + local err; f, err = load(ld2, source); if not f then return f, err end + else + error(("bad argument #1 to 'load' (function expected, got %s)"):format(type(ld)), 2) + end + if env then setfenv(f, env) end + return f + end + + -- 5.2 style `loadfile` implemented in 5.1 + function M.loadfile(filename, mode, env) + if (mode or 'bt') ~= 'bt' then + local ioerr + local fh, err = io.open(filename, 'rb'); if not fh then return fh, err end + local function ld() local chunk; chunk,ioerr = fh:read(4096); return chunk end + local f, err = M.load(ld, filename and '@'..filename, mode, env) + fh:close() + if not f then return f, err end + if ioerr then return nil, ioerr end + return f + else + local f, err = loadfile(filename); if not f then 
return f, err end + if env then setfenv(f, env) end + return f + end + end +end + +if _G.setfenv then -- Lua 5.1 + M.setfenv = _G.setfenv + M.getfenv = _G.getfenv +else -- >= Lua 5.2 + -- helper function for `getfenv`/`setfenv` + local function envlookup(f) + local name, val + local up = 0 + local unknown + repeat + up=up+1; name, val = debug.getupvalue(f, up) + if name == '' then unknown = true end + until name == '_ENV' or name == nil + if name ~= '_ENV' then + up = nil + if unknown then error("upvalues not readable in Lua 5.2 when debug info missing", 3) end + end + return (name == '_ENV') and up, val, unknown + end + + -- helper function for `getfenv`/`setfenv` + local function envhelper(f, name) + if type(f) == 'number' then + if f < 0 then + error(("bad argument #1 to '%s' (level must be non-negative)"):format(name), 3) + elseif f < 1 then + error("thread environments unsupported in Lua 5.2", 3) --[*] + end + f = debug.getinfo(f+2, 'f').func + elseif type(f) ~= 'function' then + error(("bad argument #1 to '%s' (number expected, got %s)"):format(type(name, f)), 2) + end + return f + end + -- [*] might simulate with table keyed by coroutine.running() + + -- 5.1 style `setfenv` implemented in 5.2 + function M.setfenv(f, t) + local f = envhelper(f, 'setfenv') + local up, val, unknown = envlookup(f) + if up then + debug.upvaluejoin(f, up, function() return up end, 1) -- unique upvalue [*] + debug.setupvalue(f, up, t) + else + local what = debug.getinfo(f, 'S').what + if what ~= 'Lua' and what ~= 'main' then -- not Lua func + error("'setfenv' cannot change environment of given object", 2) + end -- else ignore no _ENV upvalue (warning: incompatible with 5.1) + end + end + -- [*] http://lua-users.org/lists/lua-l/2010-06/msg00313.html + + -- 5.1 style `getfenv` implemented in 5.2 + function M.getfenv(f) + if f == 0 or f == nil then return _G end -- simulated behavior + local f = envhelper(f, 'setfenv') + local up, val = envlookup(f) + if not up then return _G end -- 
simulated behavior [**] + return val + end + -- [**] possible reasons: no _ENV upvalue, C function +end + + +return M + +--[[ FILE rockspec.in + +package = 'compat_env' +version = '$(_VERSION)-1' +source = { + url = 'https://raw.github.com/gist/1654007/$(GITID)/compat_env.lua', + --url = 'https://raw.github.com/gist/1654007/compat_env.lua', -- latest raw + --url = 'https://gist.github.com/gists/1654007/download', + md5 = '$(MD5)' +} +description = { + summary = 'Lua 5.1/5.2 environment compatibility functions', + detailed = [=[ + Provides Lua 5.1/5.2 environment related compatibility functions. + This includes implementations of Lua 5.2 style `load` and `loadfile` + for use in Lua 5.1. It also includes Lua 5.1 style `getfenv` and `setfenv` + for use in Lua 5.2. + ]=], + license = 'MIT/X11', + homepage = 'https://gist.github.com/1654007', + maintainer = 'David Manura' +} +dependencies = {} -- Lua 5.1 or 5.2 +build = { + type = 'builtin', + modules = { + ['compat_env'] = 'compat_env.lua' + } +} + +--]]--------------------------------------------------------------------- + +--[[ FILE test.lua + +-- test.lua - test suite for compat_env module. 
+ +local CL = require 'compat_env' +local load = CL.load +local loadfile = CL.loadfile +local setfenv = CL.setfenv +local getfenv = CL.getfenv + +local function checkeq(a, b, e) + if a ~= b then error( + 'not equal ['..tostring(a)..'] ['..tostring(b)..'] ['..tostring(e)..']') + end +end +local function checkerr(pat, ok, err) + assert(not ok, 'checkerr') + assert(type(err) == 'string' and err:match(pat), err) +end + +-- test `load` +checkeq(load('return 2')(), 2) +checkerr('expected near', load'return 2 2') +checkerr('text chunk', load('return 2', nil, 'b')) +checkerr('text chunk', load('', nil, 'b')) +checkerr('binary chunk', load('\027', nil, 't')) +checkeq(load('return 2*x',nil,'bt',{x=5})(), 10) +checkeq(debug.getinfo(load('')).source, '') +checkeq(debug.getinfo(load('', 'foo')).source, 'foo') + +-- test `loadfile` +local fh = assert(io.open('tmp.lua', 'wb')) +fh:write('return (...) or x') +fh:close() +checkeq(loadfile('tmp.lua')(2), 2) +checkeq(loadfile('tmp.lua', 't')(2), 2) +checkerr('text chunk', loadfile('tmp.lua', 'b')) +checkeq(loadfile('tmp.lua', nil, {x=3})(), 3) +checkeq(debug.getinfo(loadfile('tmp.lua')).source, '@tmp.lua') +checkeq(debug.getinfo(loadfile('tmp.lua', 't', {})).source, '@tmp.lua') +os.remove'tmp.lua' + +-- test `setfenv`/`getfenv` +x = 5 +local a,b=true; local function f(c) if a then return x,b,c end end +setfenv(f, {x=3}) +checkeq(f(), 3) +checkeq(getfenv(f).x, 3) +checkerr('cannot change', pcall(setfenv, string.len, {})) -- C function +checkeq(getfenv(string.len), _G) -- C function +local function g() + setfenv(1, {x=4}) + checkeq(getfenv(1).x, 4) + return x +end +checkeq(g(), 4) -- numeric level +if _G._VERSION ~= 'Lua 5.1' then + checkerr('unsupported', pcall(setfenv, 0, {})) +end +checkeq(getfenv(0), _G) +checkeq(getfenv(), _G) -- no arg +checkeq(x, 5) -- main unaltered +setfenv(function()end, {}) -- no upvalues, ignore +checkeq(getfenv(function()end), _G) -- no upvaluse +if _G._VERSION ~= 'Lua 5.1' then + 
checkeq(getfenv(setfenv(function()end, {})), _G) -- warning: incompatible with 5.1 +end +x = nil + +print 'OK' + +--]]--------------------------------------------------------------------- + +--[[ FILE CHANGES.txt +0.2.20120124 + Renamed module to compat_env (from compat_load) + Add getfenv/setfenv functions + +0.1.20120121 + Initial public release +--]] + diff --git a/builders/lua-inspect/lib/luainspect/delimited.lua b/builders/lua-inspect/lib/luainspect/delimited.lua new file mode 100644 index 000000000..e20fc4a93 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/delimited.lua @@ -0,0 +1,46 @@ +-- luainspect.delimited - Convert AST to delimited text using LuaInspect info embedded. +-- + +--! require 'luainspect.typecheck' (context) + +local M = {} + +local LI = require"luainspect.init" + + +local function escape(s) + -- s = s:gsub('\n', '\\n') -- escape new lines + s = s:gsub('"', '""') -- escape double quotes + if s:match'["\r\n,]' then s = '"'..s..'"' end -- escape with double quotes + return s +end + + +local function describe(token, tokenlist, src) + if token then + local ast = token.ast + if token.tag == 'Id' or ast.isfield then + local line = 'id' + if ast.id then line = line .. ",id" .. ast.id end + line = line .. ',' .. escape(table.concat(LI.get_var_attributes(ast),' ')) + line = line .. ',' .. 
escape(LI.get_value_details(ast, tokenlist, src):gsub('\n', ';')) + return line + end + end +end + + +function M.ast_to_delimited(ast, src, tokenlist) + local fmt_tokens = {} + for _, token in ipairs(tokenlist) do + local fchar, lchar = token.fpos, token.lpos + local desc = describe(token, tokenlist, src) + if desc then + fmt_tokens[#fmt_tokens + 1] = ("%d,%d,%s\n"):format(fchar, lchar, desc) + end + end + return table.concat(fmt_tokens) +end + + +return M diff --git a/builders/lua-inspect/lib/luainspect/dump.lua b/builders/lua-inspect/lib/luainspect/dump.lua new file mode 100644 index 000000000..6a6e9805d --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/dump.lua @@ -0,0 +1,90 @@ +-- Recursive object dumper, for debugging. +-- (c) 2010 David Manura, MIT License. + +local M = {} + +-- My own object dumper. +-- Intended for debugging, not serialization, with compact formatting. +-- Robust against recursion. +-- Renders Metalua table tag fields specially {tag=X, ...} --> "`X{...}". +-- On first call, only pass parameter o. +-- CATEGORY: AST debug +local ignore_keys_ = {lineinfo=true} +local norecurse_keys_ = {parent=true, ast=true} +local function dumpstring_key_(k, isseen, newindent) + local ks = type(k) == 'string' and k:match'^[%a_][%w_]*$' and k or + '[' .. M.dumpstring(k, isseen, newindent) .. ']' + return ks +end +local function sort_keys_(a, b) + if type(a) == 'number' and type(b) == 'number' then + return a < b + elseif type(a) == 'number' then + return false + elseif type(b) == 'number' then + return true + elseif type(a) == 'string' and type(b) == 'string' then + return a < b + else + return tostring(a) < tostring(b) -- arbitrary + end +end +function M.dumpstring(o, isseen, indent, key) + isseen = isseen or {} + indent = indent or '' + + if type(o) == 'table' then + if isseen[o] or norecurse_keys_[key] then + return (type(o.tag) == 'string' and '`' .. o.tag .. ':' or '') .. 
tostring(o) + else isseen[o] = true end -- avoid recursion + + local used = {} + + local tag = o.tag + local s = '{' + if type(o.tag) == 'string' then + s = '`' .. tag .. s; used['tag'] = true + end + local newindent = indent .. ' ' + + local ks = {}; for k in pairs(o) do ks[#ks+1] = k end + table.sort(ks, sort_keys_) + --for i,k in ipairs(ks) do print ('keys', k) end + + local forcenummultiline + for k in pairs(o) do + if type(k) == 'number' and type(o[k]) == 'table' then forcenummultiline = true end + end + + -- inline elements + for _,k in ipairs(ks) do + if used[k] then -- skip + elseif ignore_keys_[k] then used[k] = true + elseif (type(k) ~= 'number' or not forcenummultiline) and + type(k) ~= 'table' and (type(o[k]) ~= 'table' or norecurse_keys_[k]) + then + s = s .. dumpstring_key_(k, isseen, newindent) .. '=' .. M.dumpstring(o[k], isseen, newindent, k) .. ', ' + used[k] = true + end + end + + -- elements on separate lines + local done + for _,k in ipairs(ks) do + if not used[k] then + if not done then s = s .. '\n'; done = true end + s = s .. newindent .. dumpstring_key_(k, isseen) .. '=' .. M.dumpstring(o[k], isseen, newindent, k) .. ',\n' + end + end + s = s:gsub(',(%s*)$', '%1') + s = s .. (done and indent or '') .. '}' + return s + elseif type(o) == 'string' then + return string.format('%q', o) + else + return tostring(o) + end +end + +return M + diff --git a/builders/lua-inspect/lib/luainspect/globals.lua b/builders/lua-inspect/lib/luainspect/globals.lua new file mode 100644 index 000000000..2f94394cf --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/globals.lua @@ -0,0 +1,216 @@ +-- LuaInspect.globals - identifier scope analysis +-- Locates locals, globals, and their definitions. +-- +-- (c) D.Manura, 2008-2010, MIT license. + +-- based on http://lua-users.org/wiki/DetectingUndefinedVariables + +local M = {} + +--! 
require 'luainspect.typecheck' (context) + +local LA = require "luainspect.ast" + +local function definelocal(scope, name, ast) + if scope[name] then + scope[name].localmasked = true + ast.localmasking = scope[name] + end + scope[name] = ast + if name == '_' then ast.isignore = true end +end + +-- Resolves scoping and usages of variable in AST. +-- Data Notes: +-- ast.localdefinition refers to lexically scoped definition of `Id node `ast`. +-- If ast.localdefinition == ast then ast is a "lexical definition". +-- If ast.localdefinition == nil, then variable is global. +-- ast.functionlevel is the number of functions the AST is contained in. +-- ast.functionlevel is defined iff ast is a lexical definition. +-- ast.isparam is true iff ast is a lexical definition and a function parameter. +-- ast.isset is true iff ast is a lexical definition and exists an assignment on it. +-- ast.isused is true iff ast is a lexical definition and has been referred to. +-- ast.isignore is true if local variable should be ignored (e.g. typically "_") +-- ast.localmasking - for a lexical definition, this is set to the lexical definition +-- this is masking (i.e. same name). nil if not masking. +-- ast.localmasked - true iff lexical definition masked by another lexical definition. +-- ast.isfield is true iff `String node ast is used for field access on object, +-- e.g. 
x.y or x['y'].z +-- ast.previous - For `Index{o,s} or `Invoke{o,s,...}, s.previous == o +local function traverse(ast, scope, globals, level, functionlevel) + scope = scope or {} + + local blockrecurse + + -- operations on walking down the AST + if ast.tag == 'Local' then + blockrecurse = 1 + -- note: apply new scope after processing values + elseif ast.tag == 'Localrec' then + local namelist_ast, valuelist_ast = ast[1], ast[2] + for _,value_ast in ipairs(namelist_ast) do + assert(value_ast.tag == 'Id') + local name = value_ast[1] + local parentscope = getmetatable(scope).__index + definelocal(parentscope, name, value_ast) + value_ast.localdefinition = value_ast + value_ast.functionlevel = functionlevel + end + blockrecurse = 1 + elseif ast.tag == 'Id' then + local name = ast[1] + if scope[name] then + ast.localdefinition = scope[name] + ast.functionlevel = functionlevel + scope[name].isused = true + else -- global, do nothing + end + elseif ast.tag == 'Function' then + local paramlist_ast, body_ast = ast[1], ast[2] + functionlevel = functionlevel + 1 + for _,param_ast in ipairs(paramlist_ast) do + local name = param_ast[1] + assert(param_ast.tag == 'Id' or param_ast.tag == 'Dots') + if param_ast.tag == 'Id' then + definelocal(scope, name, param_ast) + param_ast.localdefinition = param_ast + param_ast.functionlevel = functionlevel + param_ast.isparam = true + end + end + blockrecurse = 1 + elseif ast.tag == 'Set' then + local reflist_ast, valuelist_ast = ast[1], ast[2] + for _,ref_ast in ipairs(reflist_ast) do + if ref_ast.tag == 'Id' then + local name = ref_ast[1] + if scope[name] then + scope[name].isset = true + else + if not globals[name] then + globals[name] = {set=ref_ast} + end + end + end + end + --ENHANCE? 
We could differentiate assignments to x (which indicates that + -- x is not const) and assignments to a member of x (which indicates that + -- x is not a pointer to const) and assignments to any nested member of x + -- (which indicates that x it not a transitive const). + elseif ast.tag == 'Fornum' then + blockrecurse = 1 + elseif ast.tag == 'Forin' then + blockrecurse = 1 + end + + -- recurse (depth-first search down the AST) + if ast.tag == 'Repeat' then + local block_ast, cond_ast = ast[1], ast[2] + local scope = scope + for _,stat_ast in ipairs(block_ast) do + scope = setmetatable({}, {__index = scope}) + traverse(stat_ast, scope, globals, level+1, functionlevel) + end + scope = setmetatable({}, {__index = scope}) + traverse(cond_ast, scope, globals, level+1, functionlevel) + elseif ast.tag == 'Fornum' then + local name_ast, block_ast = ast[1], ast[#ast] + -- eval value list in current scope + for i=2, #ast-1 do traverse(ast[i], scope, globals, level+1, functionlevel) end + -- eval body in next scope + local name = name_ast[1] + definelocal(scope, name, name_ast) + name_ast.localdefinition = name_ast + name_ast.functionlevel = functionlevel + traverse(block_ast, scope, globals, level+1, functionlevel) + elseif ast.tag == 'Forin' then + local namelist_ast, vallist_ast, block_ast = ast[1], ast[2], ast[3] + -- eval value list in current scope + traverse(vallist_ast, scope, globals, level+1, functionlevel) + -- eval body in next scope + for _,name_ast in ipairs(namelist_ast) do + local name = name_ast[1] + definelocal(scope, name, name_ast) + name_ast.localdefinition = name_ast + name_ast.functionlevel = functionlevel + end + traverse(block_ast, scope, globals, level+1, functionlevel) + else -- normal + for i,v in ipairs(ast) do + if i ~= blockrecurse and type(v) == 'table' then + local scope = setmetatable({}, {__index = scope}) + traverse(v, scope, globals, level+1, functionlevel) + end + end + end + + -- operations on walking up the AST + if ast.tag == 'Local' 
then + -- Unlike Localrec, variables come into scope after evaluating values. + local namelist_ast, valuelist_ast = ast[1], ast[2] + for _,name_ast in ipairs(namelist_ast) do + assert(name_ast.tag == 'Id') + local name = name_ast[1] + local parentscope = getmetatable(scope).__index + definelocal(parentscope, name, name_ast) + name_ast.localdefinition = name_ast + name_ast.functionlevel = functionlevel + end + elseif ast.tag == 'Index' then + if ast[2].tag == 'String' then + ast[2].isfield = true + ast[2].previous = ast[1] + end + elseif ast.tag == 'Invoke' then + assert(ast[2].tag == 'String') + ast[2].isfield = true + ast[2].previous = ast[1] + end +end + +function M.globals(ast) + -- Default list of defined variables. + local scope = setmetatable({}, {}) + local globals = {} + traverse(ast, scope, globals, 1, 1) -- Start check. + + return globals +end + + +-- Gets locals in scope of statement of block ast. If isafter is true and ast is statement, +-- uses scope just after statement ast. +-- Assumes 'parent' attributes on ast are marked. +-- Returns table mapping name -> AST local definition. 
+function M.variables_in_scope(ast, isafter) + local scope = {} + local cast = ast + while cast.parent do + local midx = LA.ast_idx(cast.parent, cast) + for idx=1,midx do + local bast = cast.parent[idx] + if bast.tag == 'Localrec' or bast.tag == 'Local' and (idx < midx or isafter) then + local names_ast = bast[1] + for bidx=1,#names_ast do + local name_ast = names_ast[bidx] + local name = name_ast[1] + scope[name] = name_ast + end + elseif cast ~= ast and (bast.tag == 'For' or bast.tag == 'Forin' or bast.tag == 'Function') then + local names_ast = bast[1] + for bidx=1,#names_ast do + local name_ast = names_ast[bidx] + if name_ast.tag == 'Id' then --Q: or maybe `Dots should be included + local name = name_ast[1] + scope[name] = name_ast + end + end + end + end + cast = cast.parent + end + return scope +end + + +return M diff --git a/builders/lua-inspect/lib/luainspect/html.lua b/builders/lua-inspect/lib/luainspect/html.lua new file mode 100644 index 000000000..f3292e4c5 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/html.lua @@ -0,0 +1,101 @@ +-- luainspect.html - Convert AST to HTML using LuaInspect info embedded. +-- +-- (c) 2010 David Manura, MIT License. + +--! require 'luainspect.typecheck' (context) + +local M = {} + +local LI = require "luainspect.init" + +-- FIX!!! 
improve: should be registered utility function +local function escape_html(s) + return s:gsub('&', '&'):gsub('<', '<'):gsub('>', '>'):gsub('"', '"') +end + +local function annotate_source(src, ast, tokenlist, emit) + local start = 1 + local fmt_srcs = {} + for _,token in ipairs(tokenlist) do + local fchar, lchar = token.fpos, token.lpos + if fchar > start then + table.insert(fmt_srcs, emit(src:sub(start, fchar-1))) + end + table.insert(fmt_srcs, emit(src:sub(fchar, lchar), token)) + start = lchar + 1 + end + if start <= #src then + table.insert(fmt_srcs, emit(src:sub(start))) + end + return table.concat(fmt_srcs) +end + +function M.ast_to_html(ast, src, tokenlist, options) + local src_html = annotate_source(src, ast, tokenlist, function(snip_src, token) + local snip_html = escape_html(snip_src) + if token then + local ast = token.ast + if token.tag == 'Id' or ast.isfield then + local class = 'id ' + class = class .. table.concat(LI.get_var_attributes(ast), " ") + if ast.id then class = class.." id"..ast.id end + local desc_html = escape_html(LI.get_value_details(ast, tokenlist, src)) + if ast.lineinfo then + local linenum = ast.lineinfo.first[1] + desc_html = desc_html .. '\nused-line:' .. 
linenum + end + return ""..snip_html..""..desc_html.."" + elseif token.tag == 'Comment' then + return ""..snip_html.."" + elseif token.tag == 'String' then -- note: excludes ast.isfield + return ""..snip_html.."" + elseif token.tag == 'Keyword' then + local id = token.keywordid and 'idk'..tostring(token.keywordid) or '' + return ""..snip_html.."" + end + end + return snip_html + end) + + + local function get_line_numbers_html(src) + local out_htmls = {} + local linenum = 1 + for line in src:gmatch("[^\n]*\n?") do + if line == "" then break end + table.insert(out_htmls, string.format('%d:\n', linenum, linenum)) + linenum = linenum + 1 + end + return table.concat(out_htmls) + end + + local line_numbers_html = get_line_numbers_html(src) + + options = options or {} + local libpath = options.libpath or '.' + + src_html = [[ + + + + + + + + + + + +
+
]] .. line_numbers_html .. [[
+
]] .. src_html .. [[
+
+
+ +]] + + return src_html +end + +return M diff --git a/builders/lua-inspect/lib/luainspect/init.lua b/builders/lua-inspect/lib/luainspect/init.lua new file mode 100644 index 000000000..ac22e0421 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/init.lua @@ -0,0 +1,1431 @@ +-- luainspect.init - core LuaInspect source analysis. +-- +-- This module is a bit more high level than luainspect.ast. It deals more with +-- interpretation/inference of semantics of an AST. It also uses luainspect.globals, +-- which does the basic semantic interpretation of globals/locals. +-- +-- (c) 2010 David Manura, MIT License. + +local M = {} + +-- This is the API version. It is an ISO8601 date expressed as a fraction. +M.APIVERSION = 0.20100805 + +local LA = require "luainspect.ast" +local LD = require "luainspect.dump" +local LG = require "luainspect.globals" +local LS = require "luainspect.signatures" +local T = require "luainspect.types" +local COMPAT = require "luainspect.compat_env" + +--! require 'luainspect.typecheck' (context) + +local ENABLE_RETURN_ANALYSIS = true +local DETECT_DEADCODE = false -- may require more validation (false positives) + + +-- Functional forms of Lua operators. +-- Note: variable names like _1 are intentional. These affect debug info and +-- will display in any error messages. 
+local ops = {} +ops['add'] = function(_1,_2) return _1+_2 end +ops['sub'] = function(_1,_2) return _1-_2 end +ops['mul'] = function(_1,_2) return _1*_2 end +ops['div'] = function(_1,_2) return _1/_2 end +ops['mod'] = function(_1,_2) return _1%_2 end +ops['pow'] = function(_1,_2) return _1^_2 end +ops['concat'] = function(_1,_2) return _1.._2 end +ops['eq'] = function(_1,_2) return _1==_2 end +ops['lt'] = function(_1,_2) return _1<_2 end +ops['le'] = function(_1,_2) return _1<=_2 end +ops['and'] = function(_1,_2) return _1 and _2 end +ops['or'] = function(_1,_2) return _1 or _2 end +ops['not'] = function(_1) return not _1 end +ops['len'] = function(_1) return #_1 end +ops['unm'] = function(_1) return -_1 end + + +-- Performs binary operation. Supports types. +local function dobinop(opid, a, b) + if (a == T.number or b == T.number) and + (a == T.number or type(a) == 'number' ) and + (b == T.number or type(b) == 'number' ) + then + if opid == 'eq' or opid == 'lt' or opid == 'le' then + return T.boolean + elseif opid == 'concat' then + return T.string + else + return T.number + end + elseif (a == T.string or b == T.string) and + (a == T.string or type(a) == 'string' ) and + (b == T.string or type(b) == 'string' ) + then + if opid == 'concat' or opid == 'and' or opid == 'or' then + return T.string + elseif opid == 'eq' or opid == 'lt' or opid == 'le' then + return T.boolean + else + return T.number + end + elseif (a == T.boolean or b == T.boolean) and + (a == T.boolean or type(a) == 'boolean' ) and + (b == T.boolean or type(b) == 'boolean' ) + then + if opid == 'eq' or opid == 'and' or opid == 'or' then + return T.boolean + else + error('invalid operation on booleans: ' .. opid, 0) + end + elseif T.istype[a] or T.istype[b] then + return T.universal + else + return ops[opid](a, b) + end +end + + +-- Performs unary operation. Supports types. 
+local function dounop(opid, a) + if opid == 'not' then + if T.istype[a] then + return T.boolean + else + return ops[opid](a) + end + elseif a == T.number then + if opid == 'unm' then + return T.number + else -- 'len' + error('invalid operation on number: ' .. opid, 0) + end + elseif a == T.string then + return T.number + elseif a == T.boolean then + error('invalid operation on boolean: ' .. opid, 0) + elseif T.istype[a] then + return nil, 'unknown' + else + return ops[opid](a) + end +end + +-- Like info in debug.getinfo but inferred by static analysis. +-- object -> {fpos=fpos, source="@" .. source, fast=ast, tokenlist=tokenlist} +-- Careful: value may reference key (affects pre-5.2 which lacks emphemerons). +-- See also ast.nocollect. +M.debuginfo = setmetatable({}, {__mode='v'}) + +-- Modules loaded via require_inspect. +-- module name string -> {return value, AST node} +-- note: AST node is maintained to prevent nocollect fields in ast being collected. +-- note: not a weak table. +M.package_loaded = {} + +-- Stringifies interpreted value for debugging. +-- CATEGORY: debug +local function debugvalue(ast) + local s + if ast then + s = ast.value ~= T.universal and 'known:' .. tostring(ast.value) or 'unknown' + else + s = '?' + end + return s +end + + +-- Reads contents of text file in path, in binary mode. +-- On error, returns nil and error message. +local function readfile(path) + local fh, err = io.open(path, 'rb') + if fh then + local data; data, err = fh:read'*a' + if data then return data end + end + return nil, err +end + +-- Similar to string.gsub but with plain replacement (similar to option in string.match) +-- http://lua-users.org/lists/lua-l/2002-04/msg00118.html +-- CATEGORY: utility/string +local function plain_gsub(s, pattern, repl) + repl = repl:gsub('(%%)', '%%%%') + return s:gsub(pattern, repl) +end + +-- Infer name of variable or literal that AST node represents. +-- This is for debugging messages. 
-- Infers a human-readable name for an AST node, used to improve error messages.
-- Returns a quoted identifier for `Id nodes, a type word for literals,
-- or nil when nothing useful can be inferred.
local function infer_name(ast)
  if ast == nil then return nil
  elseif ast.tag == 'Id' then return "'"..ast[1].."'"
  elseif ast.tag == 'Number' then return 'number'
  elseif ast.tag == 'String' then return 'string'
  elseif ast.tag == 'True' then return 'true'
  elseif ast.tag == 'False' then return 'false'
  elseif ast.tag == 'Nil' then return 'nil'
  else return nil end
end

--[[
  This is like `pcall` but any error string returned does not contain the
  "chunkname:currentline: " prefix (based on luaL_where) if the error occurred
  in the current file.  This avoids error messages in user code (f)
  being reported as being inside this module if this module calls user code.
  Also, local variable names _1, _2, etc. in error message are replaced with names
  inferred (if any) from corresponding AST nodes in list `asts` (note: nil's in asts skip replacement).
--]]
local _prefix
local _clean
local function pzcall(f, asts, ...)
  -- Capture this file's "chunkname:" error prefix once, by raising a dummy
  -- error here and stripping the trailing line number.
  _prefix = _prefix or select(2, pcall(function() error'' end)):gsub(':%d+: *$', '') -- note: specific to current file.
  _clean = _clean or function(asts, ok, ...)
    if ok then return true, ...
    else
      local err = ...
      if type(err) == 'string' then
        if err:sub(1,#_prefix) == _prefix then
          local more = err:match('^:%d+: *(.*)', #_prefix+1)
          if more then
            err = more
            -- Rewrite "local '_<n>'" placeholders using names inferred from asts[n].
            err = err:gsub([[local '_(%d+)']], function(name) return infer_name(asts[tonumber(name)]) end)
          end
        end
      end
      return ok, err
    end
  end
  return _clean(asts, pcall(f, ...))
end

-- Loads source code of given module name.
-- Returns code followed by path.
-- note: will also search in the directory `spath` and its parents.
-- This should preferably be an absolute path or it might not work correctly.
-- It must be slash terminated.
-- CATEGORY: utility/package
local function load_module_source(name, spath)
  -- Append parent directories to list of paths to search.
  local package_path = package.path
  local ppath = spath
  repeat
    package_path = package_path .. ';' .. ppath .. '?.lua;' .. ppath .. '?/init.lua'
    local nsub
    ppath, nsub = ppath:gsub('[^\\/]+[\\/]$', '')
  until nsub == 0

  for spec in package_path:gmatch'[^;]+' do
    local testpath = plain_gsub(spec, '%?', (name:gsub('%.', '/')))
    local src, err_ = readfile(testpath)
    if src then return src, testpath end
  end
  return nil
end


-- Clears global state.
-- This includes cached inspected modules.
function M.clear_cache()
  for k,v in pairs(M.package_loaded) do
    M.package_loaded[k] = nil
  end
end


-- Gets all keywords related to AST `ast`, where `top_ast` is the root of `ast`
-- and `src` is source code of `top_ast`
-- Related keywords are defined as all keywords directly associated with block containing node
-- `ast`.  Furthermore, break statements are related to containing loop statements,
-- and return statements are related to containing function statement (if any).
-- function declaration syntactic sugar is handled specially too to ensure the 'function' keyword
-- is highlighted even though it may be outside of the `Function AST.
--
-- Returns token list or nil if not applicable.  Returned `ast` is AST containing related keywords.
-- CATEGORY: keyword comprehension
local iskeystat = {Do=true, While=true, Repeat=true, If=true, Fornum=true, Forin=true,
  Local=true, Localrec=true, Return=true, Break=true, Function=true,
  Set=true -- note: Set for `function name`
}
local isloop = {While=true, Repeat=true, Fornum=true, Forin=true}
local isblock = {Do=true, While=true, Repeat=true, If=true, Fornum=true, Forin=true, Function=true}
function M.related_keywords(ast, top_ast, tokenlist, src)
  -- Expand or contract AST for certain contained statements.
  local more
  if ast.tag == 'Return' then
    -- if `return` selected, then consider containing function selected (if any)
    if not ast.parent then LA.mark_parents(top_ast) end
    local ancestor_ast = ast.parent
    while ancestor_ast ~= nil and ancestor_ast.tag ~= 'Function' do
      ancestor_ast = ancestor_ast.parent
    end
    if ancestor_ast then ast = ancestor_ast end -- but only change if exists
  elseif ast.tag == 'Break' then
    -- if `break` selected, then consider containing loop selected
    if not ast.parent then LA.mark_parents(top_ast) end
    local ancestor_ast = ast.parent
    while ancestor_ast ~= nil and not isloop[ancestor_ast.tag] do
      ancestor_ast = ancestor_ast.parent
    end
    -- NOTE(review): if no enclosing loop is found, ancestor_ast is nil here and
    -- `ast` becomes nil, which would error below; presumably unreachable since a
    -- valid parse places `break` inside a loop -- confirm.
    ast = ancestor_ast
  elseif ast.tag == 'Set' then
    local val1_ast = ast[2][1]
    if val1_ast.tag == 'Function' then
      local token = tokenlist[LA.ast_idx_range_in_tokenlist(tokenlist, ast)]
      if token.tag == 'Keyword' and token[1] == 'function' then -- function with syntactic sugar `function f`
        ast = ast[2][1] -- select `Function node
      else
        more = true
      end
    else
      more = true
    end
  elseif ast.tag == 'Localrec' and ast[2][1].tag == 'Function' then
    -- if `local function f` selected, which becomes a `Localrec, consider `Function node.
    ast = ast[2][1]
    --IMPROVE: only contract ast if `function` part of `local function` is selected.
  else
    more = true
  end
  if more then -- not yet handled
    -- Consider containing block.
    if not ast.parent then LA.mark_parents(top_ast) end
    local ancestor_ast = ast
    while ancestor_ast ~= top_ast and not isblock[ancestor_ast.tag] do
      ancestor_ast = ancestor_ast.parent
    end
    ast = ancestor_ast
  end

  -- keywords in statement/block.
  if iskeystat[ast.tag] then
    local keywords = {}
    for i=1,#tokenlist do
      local token = tokenlist[i]
      if token.ast == ast and token.tag == 'Keyword' then
        keywords[#keywords+1] = token
      end
    end

    -- Expand keywords for certain statements.
    if ast.tag == 'Function' then
      -- if `Function, also select 'function' and 'return' keywords
      local function f(ast)
        for _,cast in ipairs(ast) do
          if type(cast) == 'table' then
            if cast.tag == 'Return' then
              local token = tokenlist[LA.ast_idx_range_in_tokenlist(tokenlist, cast)]
              keywords[#keywords+1] = token
            elseif cast.tag ~= 'Function' then f(cast) end
          end
        end
      end
      f(ast)
      if not ast.parent then LA.mark_parents(top_ast) end
      local grand_ast = ast.parent.parent
      if grand_ast.tag == 'Set' then
        local token = tokenlist[LA.ast_idx_range_in_tokenlist(tokenlist, grand_ast)]
        if token.tag == 'Keyword' and token[1] == 'function' then
          keywords[#keywords+1] = token
        end
      elseif grand_ast.tag == 'Localrec' then
        local tidx = LA.ast_idx_range_in_tokenlist(tokenlist, grand_ast)
        -- scan forward to the 'function' keyword of `local function f`
        repeat tidx = tidx + 1 until tokenlist[tidx].tag == 'Keyword' and tokenlist[tidx][1] == 'function'
        local token = tokenlist[tidx]
        keywords[#keywords+1] = token
      end
    elseif isloop[ast.tag] then
      -- if loop, also select 'break' keywords
      local function f(ast)
        for _,cast in ipairs(ast) do
          if type(cast) == 'table' then
            if cast.tag == 'Break' then
              local tidx = LA.ast_idx_range_in_tokenlist(tokenlist, cast)
              keywords[#keywords+1] = tokenlist[tidx]
            elseif not isloop[cast.tag] then f(cast) end -- don't descend into nested loops
          end
        end
      end
      f(ast)
    end

    return keywords, ast
  end
  return nil, ast
end


-- Mark tokenlist (top_ast/tokenlist/src) with keywordid AST attributes.
-- All keywords related to each other have the same keyword ID integer.
-- NOTE: This is not done/undone by inspect/uninspect.
-- CATEGORY: keyword comprehension
function M.mark_related_keywords(top_ast, tokenlist, src)
  local id = 0
  local idof = {}
  for _, token in ipairs(tokenlist) do
    if token.tag == 'Keyword' and not idof[token] then
      id = id + 1
      local match_ast =
        LA.smallest_ast_containing_range(top_ast, tokenlist, token.fpos, token.lpos)
      local ktokenlist = M.related_keywords(match_ast, top_ast, tokenlist, src)
      if ktokenlist then
        for _, ktoken in ipairs(ktokenlist) do
          ktoken.keywordid = id
          idof[ktoken] = true
        end
      end
      -- note: related_keywords may return a keyword set not containing given keyword.
    end
  end
end


-- function for t[k]
-- note: the parameter names _1/_2 are significant: pzcall rewrites
-- "local '_<n>'" in runtime error messages using names inferred from the
-- corresponding AST nodes passed in its `asts` list.
local function tindex(_1, _2) return _1[_2] end

-- Maps escape letters back to the characters they encode
-- (used by eval_name in M.inspect: '%d' decodes to '.').
local unescape = {['d'] = '.'}



-- Sets known value on ast to v if ast not pegged.
-- (A "pegged" value was fixed by a special comment via env.apply_value and
-- must not be overwritten by inference.)
-- CATEGORY: utility function for infer_values.
local function set_value(ast, v)
  if not ast.isvaluepegged then
    ast.value = v
  end
end


-- True when o is a concrete runtime value (not one of the T.* type objects).
local function known(o)
  return not T.istype[o]
end
-- True when o is a T.* type object rather than a concrete value.
local function unknown(o)
  return T.istype[o]
end


-- Performs t[k] = v on AST values, returning the stored value, or T.universal
-- when any operand is unknown or the key already holds a different value.
-- note: locals _1/_2/_3 are named for pzcall's error-message rewriting.
-- CATEGORY: utility function for infer_values.
local function tastnewindex(t_ast, k_ast, v_ast)
  if known(t_ast.value) and known(k_ast.value) and known(v_ast.value) then
    local _1, _2, _3 = t_ast.value, k_ast.value, v_ast.value
    if _1[_2] ~= nil and _3 ~= _1[_2] then -- multiple values
      return T.universal
    else
      _1[_2] = _3
      return _3
    end
  else
    return T.universal
  end
end


-- Gets expected number of parameters for function (min, max) values.
-- In case of vararg, max is unknown and set to nil.
local function function_param_range(ast)
  local names_ast = ast[1]
  if #names_ast >= 1 and names_ast[#names_ast].tag == 'Dots' then
    return #names_ast-1, nil
  else
    return #names_ast, #names_ast
  end
end

-- Gets number of arguments to function call: (min, max) range.
-- In case of trailing vararg or function call, max is unknown and set to nil.
local function call_arg_range(ast)
  if ast.tag == 'Invoke' then
    -- `Invoke: ast[1]=object, ast[2]=method name, ast[3..]=explicit args;
    -- the implicit `self` counts as one argument (hence #ast-1 below).
    if #ast >= 3 and
      (ast[#ast].tag == 'Dots' or ast[#ast].tag == 'Call' or ast[#ast].tag == 'Invoke')
    then
      return #ast-2, nil
    else
      return #ast-1, #ast-1
    end
  else
    -- `Call: ast[1]=function expression, ast[2..]=args.
    if #ast >= 2 and
      (ast[#ast].tag == 'Dots' or ast[#ast].tag == 'Call' or ast[#ast].tag == 'Invoke')
    then
      return #ast-2, nil
    else
      return #ast-1, #ast-1
    end
  end
end


-- Reports warning. List of strings.
local function warn(report, ...)
  report('warning: ' .. table.concat({...}, ' '))
end

-- Reports status messages. List of strings.
local function status(report, ...)
  report('status: ' .. table.concat({...}, ' '))
end

-- unique value used to detect require loops (A require B require A)
local REQUIRE_SENTINEL = function() end

-- Gets single return value of chunk ast.  Assumes ast is inspected.
local function chunk_return_value(ast)
  local vinfo
  if ENABLE_RETURN_ANALYSIS then
    local info = M.debuginfo[ast.value]
    local retvals = info and info.retvals
    if retvals then
      vinfo = retvals[1]
    else
      vinfo = T.universal
    end
  else
    -- without return analysis, only a trailing `return <expr>` is recognized
    if ast[#ast] and ast[#ast].tag == 'Return' and ast[#ast][1] then
      vinfo = ast[#ast][1]
    else
      vinfo = T.universal
    end
  end
  return vinfo
end

-- Version of require that does source analysis (inspect) on module.
-- Returns the inferred module value (or nil on require loop).
-- Results are cached in M.package_loaded; REQUIRE_SENTINEL marks
-- modules currently being loaded so recursive requires are detected.
function M.require_inspect(name, report, spath)
  local plinfo = M.package_loaded[name]
  if plinfo == REQUIRE_SENTINEL then
    warn(report, "loop in require when loading " .. name)
    return nil
  end
  if plinfo then return plinfo[1] end
  status(report, 'loading:' .. name)
  M.package_loaded[name] = REQUIRE_SENTINEL -- avoid recursion on require loops
  local msrc, mpath = load_module_source(name, spath)
  local vinfo, mast
  if msrc then
    local err; mast, err = LA.ast_from_string(msrc, mpath)
    if mast then
      local mtokenlist = LA.ast_to_tokenlist(mast, msrc)
      M.inspect(mast, mtokenlist, msrc, report)
      vinfo = chunk_return_value(mast)
    else
      vinfo = T.error(err)
      warn(report, err, " ", mpath) --Q:error printing good?
    end
  else
    warn(report, 'module not found: ' .. name)
    vinfo = T.error'module not found' --IMPROVE: include search paths?
  end
  M.package_loaded[name] = {vinfo, mast}
  return vinfo
end


-- Marks AST node and all children as dead (ast.isdead).
local function mark_dead(ast)
  LA.walk(ast, function(bast) bast.isdead = true end)
end

-- Gets list of `Return statement ASTs in `Function (or chunk) f_ast, not including
-- return's in nested functions.  Also returns boolean `has_implicit` indicating
-- whether function may return by exiting the function without a return statement.
-- Returns that are never executed are omitted (e.g. last return is omitted in
-- `function f() if x then return 1 else return 2 end return 3 end`).
-- Also marks AST nodes with ast.isdead (dead-code).
local function get_func_returns(f_ast)
  local isalwaysreturn = {}
  local returns = {}
  local function f(ast, isdead)
    for _,cast in ipairs(ast) do if type(cast) == 'table' then
      if isdead then mark_dead(cast) end -- even if DETECT_DEADCODE disabled
      if cast.tag ~= 'Function' and not isdead then -- skip nested functions
        f(cast, isdead) -- depth-first traverse
      end
      -- once a child always-returns, following siblings are dead
      -- (except in `If, whose children are alternative branches)
      if ast.tag ~= 'If' and isalwaysreturn[cast] then isdead = true end
      -- subsequent statements in block never executed
    end end

    -- Code on walking up AST: propagate children to parents
    if ast.tag == 'Return' then
      returns[#returns+1] = ast
      isalwaysreturn[ast] = true
    elseif ast.tag == 'If' then
      if #ast%2 ~= 0 then -- has 'else' block
        -- an `If always returns only if every branch (then/elseif/else) does
        local isreturn = true
        for i=2,#ast do
          if (i%2==0 or i==#ast) and not isalwaysreturn[ast[i]] then isreturn = nil; break end
        end
        isalwaysreturn[ast] = isreturn
      end
    else -- note: iterates not just blocks, but should be ok
      for i=1,#ast do
        if isalwaysreturn[ast[i]] then
          isalwaysreturn[ast] = true; break
        end
      end
    end
  end
  f(f_ast, false)
  local block_ast = f_ast.tag == 'Function' and f_ast[2] or f_ast
  local has_implicit = not isalwaysreturn[block_ast]
  return returns, has_implicit
end

-- temporary hack?
-- Normalizes an optional value AST node to its .value, or T.none when absent.
local function valnode_normalize(valnode)
  if valnode then
    return valnode.value
  else
    return T.none
  end
end


-- Gets return value at given return argument index, given list of `Return statements.
-- Return value is a superset of corresponding types in list of statements.
-- Example: {`Return{1,2,3}, `Return{1,3,'z'}} would return
-- 1, T.number, and T.universal for retidx 1, 2 and 3 respectively.
local function get_return_value(returns, retidx)
  if #returns == 0 then return T.none
  elseif #returns == 1 then
    return valnode_normalize(returns[1][retidx])
  else
    local combined_value = valnode_normalize(returns[1][retidx])
    for i=2,#returns do
      local cur_value = valnode_normalize(returns[i][retidx])
      combined_value = T.superset_types(combined_value, cur_value)
      if combined_value == T.universal then -- can't expand set further
        return combined_value
      end
    end
    return combined_value
    --TODO: handle values with possibly any number of return values, like f()
  end
end


-- Gets return values (or types) on `Function (or chunk) represented by given AST.
-- Returns a list {n=count, ...} of per-index combined return values.
local function get_func_return_values(f_ast)
  local returns, has_implicit = get_func_returns(f_ast)
  -- an implicit return contributes an empty `Return (all values nil/none)
  if has_implicit then returns[#returns+1] = {tag='Return'} end
  local returnvals = {n=0}
  for retidx=1,math.huge do
    local value = get_return_value(returns, retidx)
    if value == T.none then break end
    returnvals[#returnvals+1] = value
    returnvals.n = returnvals.n + 1
  end
  return returnvals
end
-- Example: AST of `function(x) if x then return 1,2,3 else return 1,3,"z" end end`
-- returns {1, T.number, T.universal}.


-- Given list of values, return the first nvalues values plus the rest of the values
-- as a tuple.  Useful for things like
--   local ok, values = valuesandtuple(1, pcall(f))
-- CATEGORY: utility function (list)
local function valuesandtuple(nvalues, ...)
  if nvalues >= 1 then
    return (...), valuesandtuple(nvalues-1, select(2, ...))
  else
    return {n=select('#', ...), ...}
  end
end


-- Infers values of variables.  Also marks dead code (ast.isdead).
--FIX/WARNING - this probably needs more work
-- Sets top_ast.valueglobals, ast.value, ast.valueself
-- Walks the AST twice (down then up), setting .value on expression nodes from
-- literals, concrete evaluation of safe calls, table construction, and
-- source-level return analysis of functions (see get_func_return_values).
-- CATEGORY: code interpretation
function M.infer_values(top_ast, tokenlist, src, report)
  if not top_ast.valueglobals then top_ast.valueglobals = {} end


  -- infer values
  LA.walk(top_ast, function(ast) -- walk down
    if ast.tag == 'Function' then
      -- parameters start out completely unknown
      local paramlist_ast = ast[1]
      for i=1,#paramlist_ast do local param_ast = paramlist_ast[i]
        if param_ast.value == nil then param_ast.value = T.universal end
      end
    end
  end, function(ast) -- walk up
    -- assignments, control variables, expressions, and calls (incl. `require`).
    if ast.tag == 'Local' or ast.tag == 'Localrec' then
      local vars_ast, values_ast = ast[1], ast[2]
      -- a trailing multi-value expression (call/vararg) supplies remaining vars
      local valuelist = #values_ast > 0 and values_ast[#values_ast].valuelist
      for i=1,#vars_ast do
        local var_ast, value_ast = vars_ast[i], values_ast[i]
        local value
        if value_ast then
          value = value_ast.value
        elseif valuelist then
          local vlidx = i - #values_ast + 1
          value = valuelist.sizeunknown and vlidx > valuelist.n and T.universal or valuelist[vlidx]
        end
        set_value(var_ast, value)
      end
    elseif ast.tag == 'Set' then -- note: implementation similar to 'Local'
      local vars_ast, values_ast = ast[1], ast[2]
      local valuelist = #values_ast > 0 and values_ast[#values_ast].valuelist
      for i=1,#vars_ast do
        local var_ast, value_ast = vars_ast[i], values_ast[i]
        local value
        if value_ast then
          value = value_ast.value
        elseif valuelist then
          local vlidx = i - #values_ast + 1
          value = valuelist.sizeunknown and vlidx > valuelist.n and T.universal or valuelist[vlidx]
        end
        if var_ast.tag == 'Index' then
          local t_ast, k_ast = var_ast[1], var_ast[2]
          if not T.istype[t_ast.value] then -- note: don't mutate types
            local v_ast = {value=value}
            local ok; ok, var_ast.value = pzcall(tastnewindex, {t_ast, k_ast, v_ast}, t_ast, k_ast, v_ast)
            if not ok then var_ast.value = T.error(var_ast.value) end
            --FIX: propagate to localdefinition?
          end
        else
          assert(var_ast.tag == 'Id', var_ast.tag)
          if var_ast.localdefinition then
            set_value(var_ast, value)
          else -- global
            local name = var_ast[1]
            top_ast.valueglobals[name] = value
          end
        end
        --FIX: propagate to definition or localdefinition?
      end
    elseif ast.tag == 'Fornum' then
      local var_ast = ast[1]
      set_value(var_ast, T.number)
    elseif ast.tag == 'Forin' then
      local varlist_ast, iter_ast = ast[1], ast[2]
      if #iter_ast == 1 and iter_ast[1].tag == 'Call' and iter_ast[1][1].value == ipairs then
        for i, var_ast in ipairs(varlist_ast) do
          if i == 1 then set_value(var_ast, T.number)      -- ipairs index
          elseif i == 2 then set_value(var_ast, T.universal) -- ipairs value
          else set_value(var_ast, nil) end
        end
      elseif #iter_ast == 1 and iter_ast[1].tag == 'Call' and iter_ast[1][1].value == pairs then
        for i, var_ast in ipairs(varlist_ast) do
          -- FIX: pairs yields keys and values of arbitrary type, so both loop
          -- variables are T.universal (was incorrectly T.number, which is also
          -- inconsistent with the ipairs branch above).
          if i <= 2 then set_value(var_ast, T.universal)
          else set_value(var_ast, nil) end
        end
      else -- general case, unknown iterator
        for _, var_ast in ipairs(varlist_ast) do
          set_value(var_ast, T.universal)
        end
      end
    elseif ast.tag == 'Id' then
      if ast.localdefinition then
        local localdefinition = ast.localdefinition
        if not localdefinition.isset then -- IMPROVE: support non-const (isset false) too
          set_value(ast, localdefinition.value)
        end
      else -- global
        local name = ast[1]
        local v = top_ast.valueglobals[name]
        if v ~= nil then
          ast.value = v
        else
          local ok; ok, ast.value = pzcall(tindex, {{tag='Id', '_G'}, {tag='String', name}}, _G, name)
          if not ok then ast.value = T.error(ast.value) end
        end
      end
    elseif ast.tag == 'Index' then
      local t_ast, k_ast = ast[1], ast[2]
      if (known(t_ast.value) or T.istabletype[t_ast.value]) and known(k_ast.value) then
        local ok; ok, ast.value = pzcall(tindex, {t_ast, k_ast}, t_ast.value, k_ast.value)
        if not ok then ast.value = T.error(ast.value) end
      end
    elseif ast.tag == 'Call' or ast.tag == 'Invoke' then
      -- Determine function to call (infer via index if method call).
      local isinvoke = ast.tag == 'Invoke'
      if isinvoke then
        local t, k = ast[1].value, ast[2].value
        if known(t) and known(k) then
          local ok; ok, ast.valueself = pzcall(tindex, {ast[1], ast[2]}, t, k)
          if not ok then ast.valueself = T.error(ast.valueself) end
        end
      end
      local func; if isinvoke then func = ast.valueself else func = ast[1].value end

      -- Handle function call.
      local argvalues_concrete = true; do -- true iff all arguments known precisely.
        if #ast >= 2 then
          local firstargvalue; if isinvoke then firstargvalue = ast.valueself else firstargvalue = ast[2].value end
          if unknown(firstargvalue) then
            argvalues_concrete = false
          else -- test remaining args
            for i=3,#ast do if unknown(ast[i].value) then argvalues_concrete = false; break end end
          end
        end
      end
      local found
      if known(func) and argvalues_concrete then -- attempt call with concrete args
        -- Get list of values of arguments.
        local argvalues; do
          argvalues = {n=#ast-1}; for i=1,argvalues.n do argvalues[i] = ast[i+1].value end
          if isinvoke then argvalues[1] = ast.valueself end -- `self`
        end
        -- Any call to require is handled specially (source analysis).
        if func == require and type(argvalues[1]) == 'string' then
          local spath = ast.lineinfo.first[4] -- a HACK? relies on AST lineinfo
          local val = M.require_inspect(argvalues[1], report, spath:gsub('[^\\/]+$', ''))
          if known(val) and val ~= nil then
            ast.value = val
            found = true
          end -- note: on nil value, assumes analysis failed (not found). This is a heuristic only.
        end
        -- Attempt call if safe.
        if not found and (LS.safe_function[func] or func == pcall and LS.safe_function[argvalues[1]]) then
          local ok; ok, ast.valuelist = valuesandtuple(1, pcall(func, unpack(argvalues,1,argvalues.n)))
          ast.value = ast.valuelist[1]; if not ok then ast.value = T.error(ast.value) end
          found = true
        end
      end
      if not found then
        -- Attempt mock function.  Note: supports nonconcrete args too.
        local mf = LS.mock_functions[func]
        if mf then
          ast.valuelist = mf.outputs; ast.value = ast.valuelist[1]
        else
          -- Attempt infer from return statements in function source.
          local info = M.debuginfo[func]
          if not info then -- try match from dynamic debug info
            local dinfo = type(func) == 'function' and debug.getinfo(func)
            if dinfo then
              local source, linedefined = dinfo.source, dinfo.linedefined
              if source and linedefined then
                local sourceline = source .. ':' .. linedefined
                info = M.debuginfo[sourceline]
              end
            end
          end
          local retvals = info and info.retvals
          if retvals then
            ast.valuelist = retvals; ast.value = ast.valuelist[1]
          else
            -- Could not infer.
            ast.valuelist = {n=0, sizeunknown=true}; ast.value = T.universal
          end
        end
      end
    elseif ast.tag == 'String' or ast.tag == 'Number' then
      ast.value = ast[1]
    elseif ast.tag == 'True' or ast.tag == 'False' then
      ast.value = (ast.tag == 'True')
    elseif ast.tag == 'Function' or ast == top_ast then -- includes chunk
      if ast.value == nil then -- avoid redefinition
        -- Represent the (unexecuted) function by a fresh closure `val`,
        -- which serves as a unique key into M.debuginfo.
        local x
        local val = function() x=nil end
        local fpos = LA.ast_pos_range(ast, tokenlist)
        local source = ast.lineinfo.first[4] -- a HACK? relies on AST lineinfo
        local linenum = LA.pos_to_linecol(fpos, src)
        local retvals
        if ENABLE_RETURN_ANALYSIS then
          retvals = get_func_return_values(ast) --Q:move outside of containing conditional?
        end
        local info = {fpos=fpos, source="@" .. source, fast=ast, tokenlist=tokenlist, retvals=retvals, top_ast = top_ast}
        M.debuginfo[val] = info
        local sourceline = '@' .. source .. ':' .. linenum
        local oldinfo = M.debuginfo[sourceline]
        if oldinfo then
          if oldinfo.fast ~= ast then
            -- Two functions on the same source line cannot necessarily be disambiguated.
            -- Unfortunately, Lua debuginfo lacks exact character position.
            --   http://lua-users.org/lists/lua-l/2010-08/msg00273.html
            -- So, just disable info if ambiguous.  Note: a slight improvement is to use the lastlinedefined.
            M.debuginfo[sourceline] = false
          end
        else
          if oldinfo == nil then
            M.debuginfo[sourceline] = info -- store by sourceline too for quick lookup from dynamic debug info
          end -- else false (do nothing)
        end
        ast.value = val
        ast.nocollect = info -- prevents garbage collection while ast exists
      end
    elseif ast.tag == 'Table' then
      if ast.value == nil then -- avoid redefinition
        local value = {}
        local n = 1
        for _,east in ipairs(ast) do
          if east.tag == 'Pair' then
            local kast, vast = east[1], east[2]
            if known(kast.value) and known(vast.value) then
              if kast.value == nil then
                -- IMPROVE? warn in some way?
              else
                value[kast.value] = vast.value
              end
            end
          else
            if known(east.value) then
              value[n] = east.value
            end
            n = n + 1
          end
        end
        --table.foreach(value, print)
        ast.value = value
      end
    elseif ast.tag == 'Paren' then
      ast.value = ast[1].value
    elseif ast.tag == 'Op' then
      local opid, aast, bast = ast[1], ast[2], ast[3]
      local ok
      if bast then
        ok, ast.value = pzcall(dobinop, {aast, bast}, opid, aast.value, bast.value)
      else
        ok, ast.value = pzcall(dounop, {aast}, opid, aast.value)
      end
      if not ok then ast.value = T.error(ast.value) end
    elseif ast.tag == 'If' then
      -- detect dead-code
      if DETECT_DEADCODE then
        for i=2,#ast,2 do local valnode = ast[i-1]
          local bval = T.boolean_cast(valnode.value)
          if bval == false then -- certainly false
            mark_dead(ast[i])
          elseif bval == true then -- certainly true
            for ii=i+1,#ast do if ii%2 == 0 or ii==#ast then -- following blocks are dead
              mark_dead(ast[ii])
            end end
            break
          end
        end
      end
      -- IMPROVE? `if true return end; f()` - f could be marked as deadcode
    elseif ast.tag == 'While' then
      -- detect dead-code
      if DETECT_DEADCODE then
        local expr_ast, body_ast = ast[1], ast[2]
        if T.boolean_cast(expr_ast.value) == false then
          mark_dead(body_ast)
        end
      end
    end
  end)
end


-- Labels variables with unique identifiers.
-- Sets ast.id, ast.resolvedname
-- Locals sharing a definition share an id; globals/fields are interned by
-- (dotted) name in seen_globals.
-- CATEGORY: code interpretation
function M.mark_identifiers(ast)
  local id = 0
  local seen_globals = {}
  LA.walk(ast, function(ast)
    if ast.tag == 'Id' or ast.isfield then
      if ast.localdefinition then
        if ast.localdefinition == ast then -- lexical definition
          id = id + 1
          ast.id = id
        else
          ast.id = ast.localdefinition.id
        end
      elseif ast.isfield then
        local previousid = ast.previous.id
        if not previousid then -- note: ("abc"):upper() has no previous ID
          id = id + 1
          previousid = id
        end
        -- NOTE(review): gsub('%%', '%%') replaces '%' with '%' -- an identity;
        -- escaping '%' (e.g. as '%%') was presumably intended, to pair with
        -- eval_name's unescape step -- confirm.  '.' is encoded as '%d'
        -- (decoded via the `unescape` table).
        local name = previousid .. '.' .. ast[1]:gsub('%%', '%%'):gsub('%.', '%d')
        if not seen_globals[name] then
          id = id + 1
          seen_globals[name] = id
        end
        ast.id = seen_globals[name]

        -- also resolve name
        local previousresolvedname = ast.previous.resolvedname
        if previousresolvedname then
          ast.resolvedname = previousresolvedname .. '.' .. ast[1]:gsub('%%', '%%'):gsub('%.', '%d')
        end
      else -- global
        local name = ast[1]
        if not seen_globals[name] then
          id = id + 1
          seen_globals[name] = id
        end
        ast.id = seen_globals[name]

        -- also resolve name
        ast.resolvedname = ast[1]
      end
    end
  end)
end


-- Environment in which to execute special comments (see below).
-- Reads fall through to _G; T.* type objects are exposed by name.
local env = setmetatable({}, {__index=_G})
env.context = env

env.number = T.number
env.string = T.string
env.boolean = T.boolean
env.error = T.error


-- Applies value to all identifiers with name matching pattern.
-- This command is callable inside special comments.
-- CATEGORY: code interpretation / special comment command
function env.apply_value(pattern, val)
  local function f(ast)
    if ast.tag == 'Id' and ast[1]:match(pattern) then
      ast.value = val; ast.isvaluepegged = true
    end
    for _,bast in ipairs(ast) do
      if type(bast) == 'table' then
        f(bast)
      end
    end
  end
  f(env.ast) -- ast from environment
  --UNUSED:
  -- for i=env.asti, #env.ast do
  --   local bast = env.ast[i]
  --   if type(bast) == 'table' then f(bast) end
  --end
end


-- Evaluates all special comments (i.e. comments prefixed by '!') in code.
-- This is similar to luaanalyze.
-- CATEGORY: code interpretation / special comments
function M.eval_comments(ast, tokenlist, report)
  local function eval(command, ast)
    --DEBUG('!', command:gsub('%s+$', ''), ast.tag)
    local f, err = COMPAT.load(command, nil, 't', env)
    if f then
      env.ast = ast
      local ok, err = pcall(f, ast)
      if not ok then warn(report, err, ': ', command) end
      env.ast = nil
    else
      warn(report, err, ': ', command)
    end
  end

  for idx=1,#tokenlist do
    local token = tokenlist[idx]
    if token.tag == 'Comment' then
      local command = token[1]:match'^!(.*)'
      if command then
        local mast = LA.smallest_ast_containing_range(ast, tokenlist, token.fpos, token.lpos)
        eval(command, mast)
      end
    end
  end
end
--IMPROVE: in `do f() --[[!g()]] h()` only apply g to h.




-- Partially undoes effects of inspect().
-- Note: does not undo mark_tag2 and mark_parents (see replace_statements).
-- CATEGORY: code interpretation
function M.uninspect(top_ast)
  -- remove ast from M.debuginfo
  for k, info in pairs(M.debuginfo) do
    if info and info.top_ast == top_ast then
      M.debuginfo[k] = nil
    end
  end

  -- Clean ast.
  LA.walk(top_ast, function(ast)
    -- undo inspect_globals.globals
    ast.localdefinition = nil
    ast.functionlevel = nil
    ast.isparam = nil
    ast.isset = nil
    ast.isused = nil
    ast.isignore = nil
    ast.isfield = nil
    ast.previous = nil
    ast.localmasked = nil
    ast.localmasking = nil

    -- undo mark_identifiers
    ast.id = nil
    ast.resolvedname = nil

    -- undo infer_values
    ast.value = nil
    ast.valueself = nil
    ast.valuelist = nil
    ast.isdead = nil -- via get_func_returns
    ast.isvaluepegged = nil

    -- undo walk setting ast.seevalue
    ast.seevalue = nil

    -- undo walk setting ast.definedglobal
    ast.definedglobal = nil

    -- undo notes
    ast.note = nil

    ast.nocollect = nil
  end)

  -- undo infer_values
  top_ast.valueglobals = nil
end


-- Main inspection routine.  Inspects top_ast/tokenlist.
-- Error/status messages are sent to function `report`.
-- CATEGORY: code interpretation
function M.inspect(top_ast, tokenlist, src, report)
  --DEBUG: local t0 = os.clock()
  if not report then -- compat for older version of lua-inspect
    -- NOTE(review): assert on a (truthy) string literal never fires, so this
    -- branch is a no-op; `error(...)` was presumably intended.  As written,
    -- the fallback below silently supplies a no-op report -- confirm intent.
    assert('inspect signature changed; please upgrade your code')
  end

  report = report or function() end

  local globals = LG.globals(top_ast)

  M.mark_identifiers(top_ast)

  M.eval_comments(top_ast, tokenlist, report)

  M.infer_values(top_ast, tokenlist, src, report)
  M.infer_values(top_ast, tokenlist, src, report) -- two passes to handle forward declarations of globals (IMPROVE: more passes?)

  -- Make some nodes as having values related to its parent.
  -- This allows clicking on `bar` in `foo.bar` to display
  -- the value of `foo.bar` rather than just "bar".
  LA.walk(top_ast, function(ast)
    if ast.tag == 'Index' then
      ast[2].seevalue = ast
    elseif ast.tag == 'Invoke' then
      ast[2].seevalue = {value=ast.valueself, parent=ast}
    end
  end)

  -- Resolves a dotted name (as built by mark_identifiers, with '%d' escapes)
  -- against _G; returns nil when any step is missing or not indexable.
  local function eval_name_helper(name)
    local var = _G
    for part in (name .. '.'):gmatch("([^.]*)%.") do
      part = part:gsub('%%(.)', unescape)
      if type(var) ~= 'table' and type(var) ~= 'userdata' then return nil end --TODO:improve?
      var = var[part]
      if var == nil then return nil end
    end
    return var
  end
  local function eval_name(name)
    local ok, o = pzcall(eval_name_helper, {}, name)
    if ok then return o else return nil end
  end

  LA.walk(top_ast, function(ast)
    if ast.tag == 'Id' or ast.isfield then
      local vname = ast[1]
      --TODO: rename definedglobal to definedfield for clarity
      local atype = ast.localdefinition and 'local' or ast.isfield and 'field' or 'global'
      local definedglobal = ast.resolvedname and eval_name(ast.resolvedname) ~= nil or
        atype == 'global' and (globals[vname] and globals[vname].set) or nil
      ast.definedglobal = definedglobal
      -- FIX: _G includes modules imported by inspect.lua, which is not desired
    elseif ast.tag == 'Call' or ast.tag == 'Invoke' then
      -- Argument count check.
      local value = ast.valueself or ast[1].value
      local info = M.debuginfo[value]
      local fast = info and info.fast
      if fast or LS.argument_counts[value] then
        local nparammin, nparammax
        if fast then
          nparammin, nparammax = function_param_range(info.fast)
        else
          nparammin, nparammax = unpack(LS.argument_counts[value])
        end
        local nargmin, nargmax = call_arg_range(ast)
        --print('DEBUG:', nparammin, nparammax, nargmin, nargmax)
        local iswarn
        -- NOTE(review): target_ast is computed but unused in this version.
        local target_ast = ast.tag == 'Call' and ast[1] or ast[2]
        if (nargmax or math.huge) < nparammin then
          ast.note = "Too few arguments. "
          iswarn = true
        elseif nargmin > (nparammax or math.huge) then
          ast.note = "Too many arguments. "
          iswarn = true
        end
        if iswarn then
          ast.note = ast.note .. "Expected "
            .. nparammin .. (nparammax == nparammin and "" or " to " .. (nparammax or "infinity"))
            .. " but got "
            .. nargmin .. (nargmax == nargmin and "" or " to " .. (nargmax or "infinity")) .. "."
        end
      end
    end
  end)
end


-- Resolves identifier to value [*]
function M.resolve_id(id, scope, valueglobals, _G)
  local val
  if scope[id] then
    val = scope[id].value
  elseif valueglobals[id] ~= nil then
    val = valueglobals[id]
  else
    val = _G[id] -- assumes not raise
  end
  return val
end

-- Resolves prefix chain expression to value. [*]
-- On error returns nil and error object
function M.resolve_prefixexp(ids, scope, valueglobals, _G)
  local _1 = M.resolve_id(ids[1], scope, valueglobals, _G)
  local ok, err = pzcall(function()
    for i=2,#ids do
      _1 = _1[ids[i]]
    end
  end, {})
  if err then return nil, err or '?' end
  return _1
end

-- Gets local scope at given 1-indexed char position
function M.get_scope(pos1, ast, tokenlist)
  local mast, isafter = LA.current_statementblock(ast, tokenlist, pos1)
  local scope = LG.variables_in_scope(mast, isafter)
  return scope
end

-- Gets names in prefix expression ids (as returned by resolve_prefixexp). [*]
function M.names_in_prefixexp(ids, pos, ast, tokenlist)
  local scope = M.get_scope(pos, ast, tokenlist)
  --FIX: above does not handle `for x=1,2 do| print(x) end` where '|' is cursor position.
  local names = {}
  if #ids == 0 then -- global
    for name in pairs(scope) do names[#names+1] = name end
    for name in pairs(ast.valueglobals) do names[#names+1] = name end
    for name in pairs(_G) do names[#names+1] = name end
  else -- field
    local t, err_ = M.resolve_prefixexp(ids, scope, ast.valueglobals, _G)
    if type(t) == 'table' then -- note: err_ implies false here
      for name in pairs(t) do names[#names+1] = name end
    end
  end
  return names
end

-- Gets signature (function argument string or helpinfo string) on value.
-- Returns nil on not found.
function M.get_signature_of_value(value)
  local info = M.debuginfo[value] -- first try this
  if info and info.fast then
    -- Rebuild the parameter list text from the function's own tokens.
    local fidx, lidx = LA.ast_idx_range_in_tokenlist(info.tokenlist, info.fast[1])
    local ts = {}
    if fidx then
      for i=fidx,lidx do
        local token = info.tokenlist[i]
        ts[#ts+1] = token.tag == 'Dots' and '...' or token[1]
      end
    end
    local sig = 'function(' .. table.concat(ts, ' ') .. ')'
    if info.retvals then
      local vals = info.retvals
      local ts = {}
      if vals.n == 0 then
        sig = sig .. " no returns"
      else
        for i=1,vals.n do local val = vals[i]
          ts[#ts+1] = T.istype[val] and tostring(val) or LD.dumpstring(val) --Q:dumpstring too verbose?
        end
        sig = sig .. " returns " .. table.concat(ts, ", ")
      end
    end
    return sig
  end
  local sig = LS.value_signatures[value] -- else try this
  return sig
end


-- Gets signature (function argument string or helpinfo string) on variable ast.
-- Returns nil on not found.
function M.get_signature(ast)
  if known(ast.value) then
    return M.get_signature_of_value(ast.value)
  end
end


-- Gets 1-indexed character (or line) position and filename of
-- definition associated with AST node (if any).
function M.ast_to_definition_position(ast, tokenlist)
  local local_ast = ast.localdefinition
  local fpos, fline, path
  if local_ast then
    -- local variable: definition token position within this file
    local tidx = LA.ast_idx_range_in_tokenlist(tokenlist, local_ast)
    if tidx then
      local spath = ast.lineinfo.first[4] -- a HACK? using lineinfo
      fpos = tokenlist[tidx].fpos; path = spath
    end
  end
  if not fpos then
    -- fall back to static (M.debuginfo) or dynamic (debug.getinfo) info
    local valueast = ast.seevalue or ast
    local val = valueast and valueast.value
    local info = M.debuginfo[val] or type(val) == 'function' and debug.getinfo(val)
    if info then
      if info.source:match'^@' then
        path = info.source:match'@(.*)'
        if info.linedefined then
          fline = info.linedefined
        else
          fpos = info.fpos
        end
      end
    end
  end
  return fpos, fline, path
end


-- Returns true iff value in ast node is known in some way.
+function M.is_known_value(ast) + local vast = ast.seevalue or ast + return vast.definedglobal or known(vast.value) and vast.value ~= nil +end + + +-- Gets list of variable attributes for AST node. +function M.get_var_attributes(ast) + local vast = ast.seevalue or ast + local attributes = {} + if ast.localdefinition then + attributes[#attributes+1] = "local" + if ast.localdefinition.functionlevel < ast.functionlevel then + attributes[#attributes+1] = 'upvalue' + end + if ast.localdefinition.isparam then + attributes[#attributes+1] = "param" + end + if not ast.localdefinition.isused then attributes[#attributes+1] = 'unused' end + if ast.isignore then attributes[#attributes+1] = 'ignore' end + if ast.localdefinition.isset then attributes[#attributes+1] = 'mutatebind' + else attributes[#attributes+1] = 'constbind' end + if ast.localmasking then + attributes[#attributes+1] = "masking" + end + if ast.localmasked then + attributes[#attributes+1] = "masked" + end + elseif ast.tag == 'Id' then -- global + attributes[#attributes+1] = (M.is_known_value(vast) and "known" or "unknown") + attributes[#attributes+1] = "global" + elseif ast.isfield then + attributes[#attributes+1] = (M.is_known_value(vast) and "known" or "unknown") + attributes[#attributes+1] = "field" + else + attributes[#attributes+1] = "FIX" -- shouldn't happen? + end + if vast.parent and (vast.parent.tag == 'Call' or vast.parent.tag == 'Invoke') + and vast.parent.note + then + attributes[#attributes+1] = 'warn' + end + return attributes +end + + +-- Gets detailed information about value in AST node, as string. +function M.get_value_details(ast, tokenlist, src) + local lines = {} + + if not ast then return '?' end + + local vast = ast.seevalue or ast + + lines[#lines+1] = "attributes: " .. table.concat(M.get_var_attributes(ast), " ") + + lines[#lines+1] = "value: " .. 
tostring(vast.value) + + local sig = M.get_signature(vast) + if sig then + local kind = sig:find '%w%s*%b()$' and 'signature' or 'description' + lines[#lines+1] = kind .. ": " .. sig + end + + local fpos, fline, path = M.ast_to_definition_position(ast, tokenlist) + if fpos or fline then + local fcol + if fpos then + fline, fcol = LA.pos_to_linecol(fpos, src) + end + local location = path .. ":" .. (fline) .. (fcol and ":" .. fcol or "") + lines[#lines+1] = "location defined: " .. location + end + + if ast.localdefinition and ast.localmasking then + local fpos = LA.ast_pos_range(ast.localmasking, tokenlist) + if fpos then + local linenum = LA.pos_to_linecol(fpos, src) + lines[#lines+1] = "masking definition at line: " .. linenum + end + end + + -- Render warning notes attached to calls/invokes. + local note = vast.parent and (vast.parent.tag == 'Call' or vast.parent.tag == 'Invoke') + and vast.parent.note + if note then + lines[#lines+1] = "WARNING: " .. note + end + + return table.concat(lines, "\n") +end + + +-- Gets list of all warnings, as strings. +-- In HTML Tidy format (which supports column numbers in SciTE, although is +-- slightly verbose and lacks filename). +function M.list_warnings(tokenlist, src) + local warnings = {} + local ttoken + local function warn(msg) + local linenum, colnum = LA.pos_to_linecol(ttoken.fpos, src) + warnings[#warnings+1] = "line " .. linenum .. " column " .. colnum .. " - " .. msg + end + local isseen = {} + for i,token in ipairs(tokenlist) do ttoken = token + if token.ast then + local ast = token.ast + if ast.localmasking then + local pos = LA.ast_pos_range(ast.localmasking, tokenlist) + local linenum = pos and LA.pos_to_linecol(pos, src) + warn("local " .. ast[1] .. " masks another local" .. (pos and " on line " .. linenum or "")) + end + if ast.localdefinition == ast and not ast.isused and not ast.isignore then + warn("unused local " .. 
ast[1]) + end + if ast.isfield and not(known(ast.seevalue.value) and ast.seevalue.value ~= nil) then + warn("unknown field " .. ast[1]) + elseif ast.tag == 'Id' and not ast.localdefinition and not ast.definedglobal then + warn("unknown global " .. ast[1]) + end + local vast = ast.seevalue or ast + local note = vast.parent and (vast.parent.tag == 'Call' or vast.parent.tag == 'Invoke') + and vast.parent.note + if note and not isseen[vast.parent] then + isseen[vast.parent] = true + local esrc = LA.ast_to_text(vast.parent, tokenlist, src) + -- IMPROVE: large items like `f(function() ... end)` may be shortened. + warn(note .. (esrc and "for " .. esrc or "")) + end + end + end + return warnings +end + + +return M diff --git a/builders/lua-inspect/lib/luainspect/scite.lua b/builders/lua-inspect/lib/luainspect/scite.lua new file mode 100644 index 000000000..b112cf2da --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/scite.lua @@ -0,0 +1,1591 @@ +--[[ + luainspect.scite - SciTE text editor plugin + (c) 2010 David Manura, MIT License. + + == Background Comments == + + The interaction between SciTE add-ons like lexers and extensions, + including various Lua and C++ formulations of these, may be confusing + at first, so here's a summary. + + SciTE has an "extension interface" [1], which allows you to write C++ + modules that hook into SciTE events on a global level. SciTE comes + with two built-in extensions. The multiplexing extension + (MultiplexExtension.cxx) allows you to plug-in more than one + extension. The Lua extension (LuaExtension.cxx) allows you to write + an extension with Lua scripts [2] rather than C++. Extensions in Lua + and C++ are fairly similar, but there is an "extension." + property that "is part of the generic SciTE Extension Interface but + is currently only used by the Lua Scripting Extension" [3] and that + allows an extension script to be applied only when the active buffer + is of a specific file type or directory (rather than globally). 
+ These are called "Lua extension scripts" in contrast to (global) "Lua + startup scripts" ("ext.lua.startup.script" property). Handler + functions in the Lua extension scripts override global handlers in + the Lua startup script. Lua extension scripts supposedly provide a + standard and user-configurable way to apply extensions to specific + languages. + + Scintilla (not just SciTE) also supports lexers [4-5], which are + traditionally implemented in C++ (e.g. LexLua.cxx) and can be enabled + by the user for specific file types (rather than globally) via the + "lexer." property. Lexers can also be written in Lua + scripts [6] (i.e. OnStyle handler), via the Lua extension interface, + apparently either as Lua startup scripts or Lua extension scripts. + This differs from C++ lexers, which are not loaded via the extension + interface. Lexers are a Scintilla concept. Extensions are a SciTE + concept. + + LuaInspect is both a lexer and an extension. It does both + syntax highlighting (lexer) as well as event handling (extension) to + support intelligent behavior and analysis. LuaInspect also applies + only to Lua files (not globally) and it is implemented in Lua (not + C++). These characteristics entail that LuaInspect be a Lua extension + script. There is one exception though mentioned in the comments above + the scite.lua M.install() function in that certain initialization + actions are best handled early via a Lua startup script, so scite.lua + is called both as a startup script and extension script to do different + actions (although the mechanism is a bit awkward). You could have + LuaInspect operate entirely as a Lua startup script, but that + could interfere when editing non-Lua files. + + The fact that SciTE reloads extensions scripts on buffer swaps + is probably unnecessary but outside of our control. In any case, + overhead should be low. 
Note that the AST and token lists are cached + in the buffer object, which persists across buffer swaps, so the + really expensive parsing is avoided on buffer swaps. + + There is also SciTE ExtMan [7], which is normally (always?) loaded + as a Lua startup script. This provides various global utility + functions, as well as a mechanism to multiplex multiple Lua startup + scripts. LuaInspect does not use the latter, implementing instead + it's own install_handler mechanism, because LuaInspect is involved + in Lua extension scripts rather than Lua startup scripts. + install_handler is careful though to ensure that global handlers + in any Lua startup script (including ExtMan handlers) are still called. + + [1] http://www.scintilla.org/SciTEExtension.html + [2] http://www.scintilla.org/SciTELua.html + [3] http://www.scintilla.org/SciTEDoc.html + [4] http://www.scintilla.org/SciTELexer.html + [5] http://www.scintilla.org/ScintillaDoc.html#LexerObjects + [6] http://www.scintilla.org/ScriptLexer.html + [7] http://lua-users.org/wiki/SciteExtMan +]] + + +-- Whether to update the AST on every edit (true) or only when the selection +-- is moved to a different line (false). false can be more efficient for large files. +local UPDATE_ALWAYS = scite_GetProp('luainspect.update.always', '1') == '1' + +-- Styling will be delayed for DELAY_COUNT styling events following user typing. +-- However it will be immediately triggered on a cursor or line change. +-- 0 implies always style. Increase to improve performance but delay display update. +local UPDATE_DELAY = math.max(1, tonumber(scite_GetProp('luainspect.update.delay', '5'))) + +-- When user edits code, recompile only the portion of code that is edited. +-- This can improve performance and normally should be true unless you find problems. +local INCREMENTAL_COMPILATION = scite_GetProp('luainspect.incremental.compilation', '1') == '1' + +-- Whether to run timing tests (for internal development purposes). 
+local PERFORMANCE_TESTS = scite_GetProp('luainspect.performance.tests', '0') == '1' + +-- Experimental feature: display types/values of all known locals as annotations. +-- Allows Lua to be used like a Mathcad worksheet. +local ANNOTATE_ALL_LOCALS = scite_GetProp('luainspect.annotate.all.locals', '0') == '1' + +-- WARNING: experimental and currently buggy. +-- Auto-completes variables. +local AUTOCOMPLETE_VARS = scite_GetProp('luainspect.autocomplete.vars', '0') == '1' + +-- WARNING: experimental and currently buggy. +-- Auto-completes syntax. Like http://lua-users.org/wiki/SciteAutoExpansion . +local AUTOCOMPLETE_SYNTAX = scite_GetProp('luainspect.autocomplete.syntax', '0') == '1' + +-- Paths to append to package.path and package.cpath. +local PATH_APPEND = scite_GetProp('luainspect.path.append', '') +local CPATH_APPEND = scite_GetProp('luainspect.cpath.append', '') + +-- Whether SciTE folding is applied. Normally true. +local ENABLE_FOLDING = false -- disabled since still occasionally triggers OnStyle recursion problem. + +-- Base color scheme. +-- sciteGetProp('style.script_lua.scheme') 'dark' or 'light' (same as '') + +local LI = require "luainspect.init" +local LA = require "luainspect.ast" +local LD = require "luainspect.dump" +local T = require "luainspect.types" + +local M = {} + +--! require 'luainspect.typecheck' (context) + +-- variables stored in `buffer`: +-- ast -- last successfully compiled AST +-- src -- source text corresponding to `ast` +-- lastsrc -- last attempted `src` (might not be successfully compiled) +-- tokenlist -- tokenlist corresponding to `ast` +-- lastline - number of last line in OnUpdateUI (only if not UPDATE_ALWAYS) + + +-- Performance test utilities. Enabled only for PERFORMANCE_TESTS. 
+local perf_names = {} +local perf_times = {os.clock()} +local nilfunc = function(name_) end +local clock = PERFORMANCE_TESTS and function(name) + perf_times[#perf_times+1] = os.clock() + perf_names[#perf_names+1] = name +end or nilfunc +local clockbegin = PERFORMANCE_TESTS and function(name) + perf_names = {} + perf_times = {} + clock(name) +end or nilfunc +local clockend = PERFORMANCE_TESTS and function(name) + clock(name) + for i=1,#perf_times do + print('DEBUG:clock:', perf_names[i], perf_times[i] - perf_times[1]) + end +end or nilfunc + + +-- Shorten string by replacing any long middle section with "..." +-- CATEGORY: dump +local _pat +local function debug_shorten(s) + local keep_pat = ("."):rep(100) + _pat = _pat or "^(" .. keep_pat .. ").*(" .. keep_pat .. ")$" + return s:gsub(_pat, "%1\n<...>\n%2") +end + +-- CATEGORY: debug +local function DEBUG(...) + if LUAINSPECT_DEBUG then + print('DEBUG:', ...) + end +end + + +-- Style IDs - correspond to style properties +local S_DEFAULT = 0 +local S_LOCAL = 1 +local S_LOCAL_MUTATE = 6 +local S_LOCAL_UNUSED = 7 +local S_LOCAL_PARAM = 8 +local S_LOCAL_PARAM_MUTATE = 16 +local S_UPVALUE = 10 +local S_UPVALUE_MUTATE = 15 +local S_GLOBAL_RECOGNIZED = 2 --Q:rename recognized->known? 
+local S_GLOBAL_UNRECOGNIZED = 3
+local S_FIELD = 11
+local S_FIELD_RECOGNIZED = 12
+local S_COMMENT = 4
+local S_STRING = 5
+local S_TAB = 13
+local S_KEYWORD = 14
+local S_COMPILER_ERROR = 9
+local STYLES = {}
+STYLES.default = S_DEFAULT
+STYLES['local'] = S_LOCAL
+STYLES.local_mutate = S_LOCAL_MUTATE
+STYLES.local_unused = S_LOCAL_UNUSED
+STYLES.local_param = S_LOCAL_PARAM
+STYLES.local_param_mutate = S_LOCAL_PARAM_MUTATE
+STYLES.upvalue = S_UPVALUE
+STYLES.upvalue_mutate = S_UPVALUE_MUTATE
+STYLES.global_recognized = S_GLOBAL_RECOGNIZED
+STYLES.global_unrecognized = S_GLOBAL_UNRECOGNIZED
+STYLES.field = S_FIELD
+STYLES.field_recognized = S_FIELD_RECOGNIZED
+STYLES.comment = S_COMMENT
+STYLES.string = S_STRING
+STYLES.tab = S_TAB
+STYLES.keyword = S_KEYWORD
+STYLES.compiler_error = S_COMPILER_ERROR
+STYLES.indic_fore = 'indic_fore'
+STYLES.indic_style = 'indic_style'
+
+
+-- Marker for range of lines with invalidated code that doesn't parse.
+local MARKER_ERROR = 0
+-- Markers for lines of variable scope or block.
+local MARKER_SCOPEBEGIN = 8
+local MARKER_SCOPEMIDDLE = 2
+local MARKER_SCOPEEND = 3
+-- Marker for specific line with parser error.
+local MARKER_ERRORLINE = 4
+-- Marker displayed to alert user that syntax highlighting has been delayed
+-- during user typing.
+local MARKER_WAIT = 5
+-- Marker displayed next to local definition that is masked by selected local definition.
+local MARKER_MASKED = 6
+-- Marker displayed next to local definition masking another local definition.
+local MARKER_MASKING = 7
+-- note: marker 1 used for bookmarks
+
+-- Indicator for syntax or other errors
+local INDICATOR_ERROR = 0
+-- Indicator for variable instances in scope.
+local INDICATOR_SCOPE = 1
+-- Indicator for related keywords in block.
+local INDICATOR_KEYWORD = 2
+-- Indicator for locals masking other locals (name conflict).
+local INDICATOR_MASKING = 3
+-- Indicator for autocomplete characters (typing over them is ignored). 
+local INDICATOR_AUTOCOMPLETE = 4 +-- Indicator or locals masked by other locals (name conflict). +local INDICATOR_MASKED = 5 +-- Indicator for warnings. +local INDICATOR_WARNING = 6 +-- Indicator for dead-code +local INDICATOR_DEADCODE = 7 + +-- Display annotations. +-- Used for ANNOTATE_ALL_LOCALS feature. +-- CATEGORY: SciTE GUI + AST +local function annotate_all_locals() + -- Build list of annotations. + local annotations = {} + for i=1,#buffer.tokenlist do + local token = buffer.tokenlist[i] + if token.ast.localdefinition == token.ast then + local info = LI.get_value_details(token.ast, buffer.tokenlist, buffer.src) + local linenum0 = editor:LineFromPosition(token.lpos-1) + annotations[linenum0] = (annotations[linenum0] or "") .. "detail: " .. info + end + end + -- Apply annotations. + editor.AnnotationVisible = ANNOTATION_BOXED + for linenum0=0,table.maxn(annotations) do + if annotations[linenum0] then + editor.AnnotationStyle[linenum0] = S_DEFAULT + editor:AnnotationSetText(linenum0, annotations[linenum0]) + end + end +end + + +-- Warning/status reporting function. +-- CATEGORY: SciTE GUI + reporting + AST +local report = print + + +-- Attempts to update AST from editor text and apply decorations. +-- CATEGORY: SciTE GUI + AST +local function update_ast() + -- Skip update if text unchanged. + local newsrc = editor:GetText() + if newsrc == buffer.lastsrc then + return false + end + buffer.lastsrc = newsrc + clockbegin 't1' + + local err, linenum, colnum, linenum2 + + -- Update AST. + local errfpos0, errlpos0 + if newsrc == buffer.src then -- returned to previous good version + -- note: nothing to do besides display + else + -- note: loadstring and metalua don't parse shebang + local newmsrc = LA.remove_shebang(newsrc) + + -- Quick syntax check. + -- loadstring is much faster than Metalua, so try that first. + -- Furthermore, Metalua accepts a superset of the Lua grammar. 
+ local f; f, err, linenum, colnum, linenum2 = LA.loadstring(newmsrc) + + -- Analyze code using LuaInspect, and apply decorations + if f then + -- Select code to compile. + local isincremental = INCREMENTAL_COMPILATION and buffer.ast + local pos1f, pos1l, pos2f, pos2l, old_ast, old_type, compilesrc + if isincremental then + pos1f, pos1l, pos2f, pos2l, old_ast, old_type = + LA.invalidated_code(buffer.ast, buffer.tokenlist, LA.remove_shebang(buffer.src), newmsrc) + compilesrc = newmsrc:sub(pos2f,pos2l) + DEBUG('inc', pos1f, pos1l, pos2f, pos2l, old_ast, old_type ) + DEBUG('inc-compile:[' .. debug_shorten(compilesrc) .. ']', old_ast and (old_ast.tag or 'notag'), old_type, pos1f and (pos2l - pos1l), pos1l, pos2f) + else + compilesrc = newmsrc + end + clock 't2' + + -- Generate AST. + local ast + if old_type ~= 'whitespace' then + --currently not needed: compilesrc = compilesrc .. '\n' --FIX:Workaround:Metalua:comments not postfixed by '\n' ignored. + ast, err, linenum, colnum, linenum2 = LA.ast_from_string(compilesrc, props.FilePath) + --DEBUG(table.tostring(ast, 20)) + end + clock 't3' + + if err then + print "warning: metalua failed to compile code that compiles with loadstring. error in metalua?" + else + local tokenlist = ast and LA.ast_to_tokenlist(ast, compilesrc) + -- note: ast nil if whitespace + --print(LA.dump_tokenlist(tokenlist)) + + + buffer.src = newsrc + if isincremental and old_type ~= 'full' then + -- Adjust line numbers. 
+ local delta = pos2l - pos1l + LA.adjust_lineinfo(buffer.tokenlist, pos1l, delta) + if ast then + LA.adjust_lineinfo(tokenlist, 1, pos2f-1) + end + + -- Inject AST + if old_type == 'whitespace' then + -- nothing + elseif old_type == 'comment' then + assert(#tokenlist == 1 and tokenlist[1].tag == 'Comment') -- replacing with comment + local newcommenttoken = tokenlist[1] + local token = old_ast + token.fpos, token.lpos, token[1], token[4] = + newcommenttoken.fpos, newcommenttoken.lpos, newcommenttoken[1], newcommenttoken[4] + else assert(old_type == 'statblock') + LA.replace_statements(buffer.ast, buffer.tokenlist, old_ast, ast, tokenlist) + end + + if not(old_type == 'comment' or old_type == 'whitespace') then + LI.uninspect(buffer.ast) + LI.inspect(buffer.ast, buffer.tokenlist, buffer.src, report) --IMPROVE: don't do full inspection + end + else --full + -- old(FIX-REMOVE?): careful: if `buffer.tokenlist` variable exists in `newsrc`, then + -- `LI.inspect` may attach its previous value into the newly created + -- `buffer.tokenlist`, eventually leading to memory overflow. + + buffer.tokenlist = tokenlist + buffer.ast = ast + LI.inspect(buffer.ast, buffer.tokenlist, buffer.src, report) + end + if LUAINSPECT_DEBUG then + DEBUG(LA.dump_tokenlist(buffer.tokenlist)) + DEBUG(LD.dumpstring(buffer.ast)) + --DEBUG(table.tostring(buffer.ast, 20)) + end + end + else + -- Locate position range causing error. 
+ if buffer.ast then + local pos1f, pos1l, pos2f, pos2l, old_ast, old_type = + LA.invalidated_code(buffer.ast, buffer.tokenlist, LA.remove_shebang(buffer.src), newmsrc, true) + errfpos0, errlpos0 = pos2f-1, pos2l-1 + end + end + end + clockend 't4' + + -- Apply styling + if err then + local pos = linenum and editor:PositionFromLine(linenum-1) + colnum - 1 + --old: editor:CallTipShow(pos, err) + --old: editor:BraceHighlight(pos,pos) -- highlight position of error (hack: using brace highlight) + editor.IndicatorCurrent = INDICATOR_ERROR + editor:IndicatorClearRange(0, editor.Length) + editor:IndicatorFillRange(pos, 1) --IMPROVE:mark entire token? + editor:MarkerDeleteAll(MARKER_ERRORLINE) + editor:MarkerAdd(linenum-1, MARKER_ERRORLINE) + editor:AnnotationClearAll() + editor.AnnotationVisible = ANNOTATION_BOXED + local errlinenum0 = errfpos0 and editor:LineFromPosition(errlpos0+1) or linenum-1 + -- note: +1 to avoid error message moving above cursor on pressing Enter. + editor.AnnotationStyle[errlinenum0] = S_COMPILER_ERROR + editor:AnnotationSetText(errlinenum0, "error " .. err) + if linenum2 then -- display error in two locations + --old:editor.AnnotationStyle[linenum2-1] = S_COMPILER_ERROR + -- editor:AnnotationSetText(linenum2-1, "error " .. err) + editor:MarkerAdd(linenum2-1, MARKER_ERRORLINE) + end + + -- Indicator over invalidated position range causing error. 
+ if errfpos0 then + --unused: editor.IndicatorCurrent = INDICATOR_INVALIDATED + -- editor:IndicatorClearRange(INDICATOR_INVALIDATED, editor.Length) + -- editor:IndicatorFillRange(errfpos0, errlpos0-errfpos0+1) + for line0=editor:LineFromPosition(errfpos0), editor:LineFromPosition(errlpos0) do + editor:MarkerAdd(line0, MARKER_ERROR) + end + end + else + + --old: editor:CallTipCancel() + editor.IndicatorCurrent = INDICATOR_ERROR + editor:IndicatorClearRange(0, editor.Length) + editor:MarkerDeleteAll(MARKER_ERRORLINE) + editor:AnnotationClearAll() + --unused: editor.IndicatorCurrent = INDICATOR_INVALIDATED + -- editor:IndicatorClearRange(0, editor.Length) + editor:MarkerDeleteAll(MARKER_ERROR) + + if ANNOTATE_ALL_LOCALS then annotate_all_locals() end + end + + -- Do auto-completion. + -- WARNING:FIX:the implementations here are currently rough. + if AUTOCOMPLETE_SYNTAX and errfpos0 then + editor.IndicatorCurrent = INDICATOR_AUTOCOMPLETE + --DEBUG(buffer.lastsrc) + local ssrc = buffer.lastsrc:sub(errfpos0+1, errlpos0+1) + + if ssrc == "if " then + local more = " then end" + editor:InsertText(errlpos0+1, more) + editor:IndicatorFillRange(errlpos0+1, #more) + end + if ssrc:match'^[^"]*"[^"]*$' then + local more = '"' + editor:InsertText(errlpos0+1, more) + editor:IndicatorFillRange(errlpos0+1, #more) + end + if ssrc:match'%{[^%}]*$' then + more = '}' + editor:InsertText(errlpos0+1, more) + editor:IndicatorFillRange(errlpos0+1, #more) + end + if ssrc:match'%([^%)]*$' then + more = ')' + editor:InsertText(errlpos0+1, more) + editor:IndicatorFillRange(errlpos0+1, #more) + end + end +end + + +-- Gets token assocated with currently selected variable (if any). 
+-- CATEGORY: SciTE GUI + AST +local function getselectedvariable() + if buffer.src ~= editor:GetText() then return end -- skip if AST not up-to-date + local selectedtoken + local id + local pos = editor.Anchor+1 + for i,token in ipairs(buffer.tokenlist) do + if pos >= token.fpos and pos <= token.lpos then + if token.ast.id then + selectedtoken = token + id = token.ast.id + end + break + end + end + return selectedtoken, id +end + + +-- Marks in margin range of 0-indexed lines. +-- CATEGORY: SciTE GUI +local function scope_lines(firstline0, lastline0) + if firstline0 ~= lastline0 then -- multiline + --TODO: not rendering exactly as desired. TCORNERCURVE should + -- preferrably be an upside-down LCORNERCURVE; plus the color on TCORNERCURVE is off. + editor:MarkerAdd(firstline0, MARKER_SCOPEBEGIN) + for n=firstline0+1,lastline0-1 do + editor:MarkerAdd(n, MARKER_SCOPEMIDDLE) + end + editor:MarkerAdd(lastline0, MARKER_SCOPEEND) + else -- single line + editor:MarkerAdd(firstline0, MARKER_SCOPEMIDDLE) + end +end + + +-- Marks in margin range of 0-indexed positions. +-- CATEGORY: SciTE GUI +local function scope_positions(fpos0, lpos0) + local firstline0 = editor:LineFromPosition(fpos0) + local lastline0 = editor:LineFromPosition(lpos0) + scope_lines(firstline0, lastline0) +end + + +-- Responds to UI updates. This includes moving the cursor. +-- CATEGORY: SciTE event handler +function M.OnUpdateUI() + -- Disable any autocomplete indicators if cursor moved away. + if AUTOCOMPLETE_SYNTAX then + if editor:IndicatorValueAt(INDICATOR_AUTOCOMPLETE, editor.CurrentPos) ~= 1 then + editor.IndicatorCurrent = INDICATOR_AUTOCOMPLETE + editor:IndicatorClearRange(0, editor.Length) + end + end + + -- This updates the AST when the selection is moved to a different line. 
+ if not UPDATE_ALWAYS then + local currentline = editor:LineFromPosition(editor.Anchor) + if currentline ~= buffer.lastline then + update_ast() + buffer.lastline = currentline + end + end + + if buffer.src ~= editor:GetText() then return end -- skip if AST is not up-to-date + + -- check if selection if currently on identifier + local selectedtoken, id = getselectedvariable() + + --test: adding items to context menu upon variable selection + --if id then + -- props['user.context.menu'] = selectednote.ast[1] .. '|1101' + -- --Q: how to reliably remove this upon a buffer switch? + --end + + -- Highlight all instances of that identifier. + editor:MarkerDeleteAll(MARKER_SCOPEBEGIN) + editor:MarkerDeleteAll(MARKER_SCOPEMIDDLE) + editor:MarkerDeleteAll(MARKER_SCOPEEND) + editor:MarkerDeleteAll(MARKER_MASKED) + editor:MarkerDeleteAll(MARKER_MASKING) + editor.IndicatorCurrent = INDICATOR_SCOPE + editor:IndicatorClearRange(0, editor.Length) + editor.IndicatorCurrent = INDICATOR_MASKED + editor:IndicatorClearRange(0, editor.Length) + if id then + + -- Indicate uses of variable. + editor.IndicatorCurrent = INDICATOR_SCOPE + local ftoken, ltoken -- first and last occurances + for _,token in ipairs(buffer.tokenlist) do + if token.ast.id == id then + ltoken = token + if not ftoken then ftoken = token end + editor:IndicatorFillRange(token.fpos-1, token.lpos-token.fpos+1) + end + end + + scope_positions(ftoken.fpos-1, ltoken.lpos-1) + + -- identify any local definition masked by any selected local definition. + local ast = selectedtoken -- cast: `Id tokens are AST nodes. 
+ if ast.localmasking and not ast.isignore then + local fpos, lpos = LA.ast_pos_range(ast.localmasking, buffer.tokenlist) + if fpos then + local maskedlinenum0 = editor:LineFromPosition(fpos-1) + local maskinglinenum0 = editor:LineFromPosition(selectedtoken.fpos-1) + editor:MarkerAdd(maskedlinenum0, MARKER_MASKED) + editor:MarkerAdd(maskinglinenum0, MARKER_MASKING) + editor.IndicatorCurrent = INDICATOR_MASKED + editor:IndicatorFillRange(fpos-1, lpos-fpos+1) + end + end + end + + -- Highlight related keywords. + do + editor.IndicatorCurrent = INDICATOR_KEYWORD + editor:IndicatorClearRange(0, editor.Length) + + -- Check for selection over statement or expression. + local fpos, lpos = editor.Anchor, editor.CurrentPos + if lpos < fpos then fpos, lpos = lpos, fpos end -- swap + fpos, lpos = fpos + 1, lpos + 1 - 1 + local match1_ast, match1_comment, iswhitespace = + LA.smallest_ast_containing_range(buffer.ast, buffer.tokenlist, fpos, lpos) + -- DEBUG('m', match1_ast and match1_ast.tag, match1_comment, iswhitespace) + + -- Find and highlight. + local keywords; keywords, match1_ast = LI.related_keywords(match1_ast, buffer.ast, buffer.tokenlist, buffer.src) + if keywords then + for i=1,#keywords do + local fpos, lpos = keywords[i].fpos, keywords[i].lpos + editor:IndicatorFillRange(fpos-1, lpos-fpos+1) + end + end + + -- Mark range of lines covered by item on selection. + if not id then + local fpos, lpos = LA.ast_pos_range(match1_ast, buffer.tokenlist) + if fpos then scope_positions(fpos-1, lpos-1) end + end + end + + + --[[ + -- Display callinfo help on function. + if selectednote and selectednote.ast.resolvedname and LS.global_signatures[selectednote.ast.resolvedname] then + local name = selectednote.ast.resolvedname + editor:CallTipShow(editor.Anchor, LS.global_signatures[name]) + else + --editor:CallTipCancel() + end + ]] +end + + +-- Responds to requests for restyling. 
+-- Note: if StartStyling is not applied over the entire requested range, than this function is quickly recalled +-- (which possibly can be useful for incremental updates) +-- CATEGORY: SciTE event handler +local style_delay_count = 0 +local isblock = {Function=true} +local debug_recursion = 0 +function M.OnStyle(styler) + assert(styler.language == "script_lua") + + -- Optionally delay styling. + --print('DEBUG:style-count', style_delay_count) + if style_delay_count > 0 then + -- Dislpay wait marker if not displayed and new text parsing not yet attempted. + if not buffer.wait_marker_line and editor:GetText() ~= buffer.lastsrc then + buffer.wait_marker_line = editor:LineFromPosition(editor.CurrentPos) + editor:MarkerDeleteAll(MARKER_WAIT) + editor:MarkerAdd(buffer.wait_marker_line, MARKER_WAIT) + style_delay_count = style_delay_count + 1 + -- +1 is hack to work around warning described below. + end + style_delay_count = style_delay_count - 1 + return + elseif style_delay_count == 0 then + if buffer.wait_marker_line then + editor:MarkerDeleteAll(MARKER_WAIT) + buffer.wait_marker_line = nil + end + end + style_delay_count = UPDATE_DELAY + -- WARNING: updating marker causes another style event to be called immediately. + -- Therefore, we take care to only update marker when marker state needs changed + -- and correct the count when we do. + + --IMPROVE: could metalua libraries parse text across multiple calls to + --`OnStyle` to reduce long pauses with big files? Maybe use coroutines. + + --DEBUG("style",styler.language, styler.startPos, styler.lengthDoc, styler.initStyle) + + -- update AST if needed + if UPDATE_ALWAYS then + update_ast() + elseif not buffer.lastsrc then + -- this ensures that AST compiling is attempted when file is first loaded since OnUpdateUI + -- is not called on load. 
+ update_ast() + end + + --DEBUG('OnStyle', editor:LineFromPosition(styler.startPos), editor:LineFromPosition(styler.startPos+styler.lengthDoc), styler.initStyle) + if buffer.src ~= editor:GetText() then return end -- skip if AST not up-to-date + -- WARNING: SciTE will repeatedly call OnStyle until StartStyling is performed. + -- However, StartStyling/Forward/EndStyling clears styles in the given range, + -- but we prefer to leave the styles as is. + + debug_recursion = debug_recursion + 1 + if debug_recursion ~= 1 then print('warning: OnStyle recursion', debug_recursion) end + -- folding previously triggered recursion leading to odd effects; make sure this is gone + + -- Apply SciTE styling + editor.StyleHotSpot[S_LOCAL] = true + editor.StyleHotSpot[S_LOCAL_MUTATE] = true + editor.StyleHotSpot[S_LOCAL_UNUSED] = true + editor.StyleHotSpot[S_LOCAL_PARAM] = true + editor.StyleHotSpot[S_LOCAL_PARAM_MUTATE] = true + editor.StyleHotSpot[S_UPVALUE] = true + editor.StyleHotSpot[S_UPVALUE_MUTATE] = true + editor.StyleHotSpot[S_GLOBAL_RECOGNIZED] = true + editor.StyleHotSpot[S_GLOBAL_UNRECOGNIZED] = true + editor.StyleHotSpot[S_FIELD] = true + editor.StyleHotSpot[S_FIELD_RECOGNIZED] = true + -- note: SCN_HOTSPOTCLICK, SCN_HOTSPOTDOUBLECLICK currently aren't + -- implemented by SciTE, although it has been proposed. + + local startpos0, endpos0 = 0, editor.Length -1 + styler:StartStyling(startpos0, endpos0 - startpos0 + 1, 0) + -- local startpos0 = styler.startPos + --styler:StartStyling(styler.startPos, styler.lengthDoc, styler.initStyle) + -- a partial range like this doesn't work right since variables outside of edited range + -- may need styling adjusted (e.g. 
a local variable definition that becomes unused) + + local i=startpos0+1 + local tokenidx = 1 + local token = buffer.tokenlist[tokenidx] + local function nexttoken() tokenidx = tokenidx+1; token = buffer.tokenlist[tokenidx] end + while styler:More() do + while token and i > token.lpos do + nexttoken() + end + + if token and i >= token.fpos and i <= token.lpos then + local ast = token.ast + if token.tag == 'Id' then + if ast.localdefinition then -- local + if not ast.localdefinition.isused and not ast.isignore then + styler:SetState(S_LOCAL_UNUSED) + elseif ast.localdefinition.functionlevel < ast.functionlevel then -- upvalue + if ast.localdefinition.isset then + styler:SetState(S_UPVALUE_MUTATE) + else + styler:SetState(S_UPVALUE) + end + elseif ast.localdefinition.isparam then + if ast.localdefinition.isset then + styler:SetState(S_LOCAL_PARAM_MUTATE) + else + styler:SetState(S_LOCAL_PARAM) + end + else + if ast.localdefinition.isset then + styler:SetState(S_LOCAL_MUTATE) + else + styler:SetState(S_LOCAL) + end + end + else -- global + if ast.definedglobal then + styler:SetState(S_GLOBAL_RECOGNIZED) + else + styler:SetState(S_GLOBAL_UNRECOGNIZED) + end + end + elseif ast.isfield then -- implies token.tag == 'String' + local val = ast.seevalue.value + if ast.definedglobal or val ~= T.universal and not T.iserror[val] and val ~= nil then + styler:SetState(S_FIELD_RECOGNIZED) + else + styler:SetState(S_FIELD) + end + elseif token.tag == 'Comment' then + styler:SetState(S_COMMENT) + elseif token.tag == 'String' then -- note: excludes ast.isfield + styler:SetState(S_STRING) + elseif token.tag == 'Keyword' then + styler:SetState(S_KEYWORD) + else + styler:SetState(S_DEFAULT) + end + elseif styler:Current() == '\t' then + styler:SetState(S_TAB) + else + styler:SetState(S_DEFAULT) + end + styler:Forward() + i = i + #styler:Current() -- support Unicode + end + styler:EndStyling() + + -- Apply indicators in token list. + -- Mark masking local variables and warnings. 
+ editor.IndicatorCurrent = INDICATOR_MASKING + editor:IndicatorClearRange(0, editor.Length) + editor.IndicatorCurrent = INDICATOR_WARNING + editor:IndicatorClearRange(0, editor.Length) + editor.IndicatorCurrent = INDICATOR_DEADCODE + editor:IndicatorClearRange(0, editor.Length) + local tokenlist = buffer.tokenlist + for idx=1,#tokenlist do + local token = tokenlist[idx] + local ast = token.ast + if ast and ast.localmasking and not ast.isignore then + editor.IndicatorCurrent = INDICATOR_MASKING + editor:IndicatorFillRange(token.fpos-1, token.lpos - token.fpos + 1) + end + if ast and (ast.seevalue or ast).note then + local hast = ast.seevalue or ast + if hast.tag == 'Call' then hast = hast[1] elseif hast.tag == 'Invoke' then hast = hast[2] end + -- note: for calls only highlight function name + local fpos, lpos = LA.ast_pos_range(hast, buffer.tokenlist) + editor.IndicatorCurrent = INDICATOR_WARNING + editor:IndicatorFillRange(fpos-1, lpos-fpos+1) + end + if ast and ast.isdead then + local fpos, lpos = LA.ast_pos_range(ast, buffer.tokenlist) + editor.IndicatorCurrent = INDICATOR_DEADCODE + editor:IndicatorFillRange(fpos-1, lpos-fpos+1) + end + end + + -- Apply folding. + if ENABLE_FOLDING then + clockbegin 'f1' + local fsline1 = editor:LineFromPosition(startpos0)+1 + local lsline1 = editor:LineFromPosition(endpos0)+1 + --print('DEBUG:+', linea0,lineb0) -- test for recursion + -- IMPROVE: This might be done only over styler.startPos, styler.lengthDoc. + -- Does that improve performance? + local level = 0 + local levels = {} + local plinenum1 = 1 + local firstseen = {} + for _, token in ipairs(buffer.tokenlist) do + -- Fill line numbers up to and including this token. + local llinenum1 = editor:LineFromPosition(token.lpos-1)+1 + -- note: much faster than non-caching LA.pos_to_linecol. + for linenum1=plinenum1,llinenum1 do levels[linenum1] = levels[linenum1] or level end + + -- Monitor level changes and set any header flags. 
+ if token.ast and token.ast.tag == 'Function' then + if not firstseen[token.ast] then + level = level + 1 + firstseen[token.ast] = llinenum1 + elseif token[1] == 'end' then + level = level -1 + local beginlinenum1 = firstseen[token.ast] + if llinenum1 > beginlinenum1 then + local old_value = levels[beginlinenum1] + if old_value < SC_FOLDLEVELHEADERFLAG then + levels[beginlinenum1] = old_value + SC_FOLDLEVELHEADERFLAG + end + end + end + end -- careful: in Metalua, `function` is not always part of the `Function node. + + plinenum1 = llinenum1 + 1 + end + for line1=plinenum1,editor.LineCount do levels[line1] = level end -- fill remaining + --for line1=1,#levels do print('DEBUG:', line1, levels[line1]) end + for line1=1,#levels do -- apply + --for line1=fsline1,lsline1 do -- apply + styler:SetLevelAt(line1-1, levels[line1]) + --Q:why does this seem to sometimes trigger recursive OnStyle calls? (see below). + end + clockend 'f2' + -- Caution: careful folding if StartStyling is performed over a range larger + -- than suggested by startPos/lengthDoc. + -- Note: Folding sometimes tend to trigger OnStyle recursion, leading to odd problems. This + -- seems reduced now but not gone (e.g. load types.lua). + -- The following old comments are left here: + -- # Changing a flag on a line more than once triggers heavy recursion, even stack overflow: + -- # styler:SetLevelAt(0,1) + -- # styler:SetLevelAt(0,1 + SC_FOLDLEVELHEADERFLAG) + -- # Setting levels only on lines being styled may reduce though not eliminate recursion. + -- # Iterating in reverse may reduce though not eliminate recursion. + -- # Disabling folding completely eliminates recursion. 
+ --print'DEBUG:-' -- test for recursion + end + + debug_recursion = debug_recursion - 1 +end + + +-- CATEGORY: SciTE event handler +function M.OnDoubleClick() + if buffer.src ~= editor:GetText() then return end -- skip if AST is not up-to-date + + -- check if selection if currently on identifier + local token = getselectedvariable() + if token and token.ast then + local info = LI.get_value_details(token.ast, buffer.tokenlist, buffer.src) + editor:CallTipShow(token.fpos-1, info) + end +end + + +--TODO:ExtMan: add to extman? Currently extman includes scite_UserListShow wrapping UserListShow +--CAREFUL: must be properly sorted (toupper if AutoCIgnoreCase) +-- CATEGORY: utility, GUI +local function mycshow(list, len) + editor.AutoCSeparator = 1 + editor.AutoCIgnoreCase = true + editor:AutoCShow(len or 0, table.concat(list, '\1')) +end + + +-- Converts object to string (no nesting). +-- CATEGORY: utility function, string +local function dump_shallow(o) + return type(o) == 'string' and string.format('%q', o) or tostring(o) +end + +-- Converts table key to string (no nesting) +-- utility function +local iskeyword_ = { + ['and']=true, ['break']=true, ['do']=true, ['else']=true, ['elseif']=true, + ['end']=true, ['false']=true, ['for']=true, ['function']=true, ['if']=true, + ['in']=true, ['local']=true, ['nil']=true, ['not']=true, ['or']=true, + ['repeat']=true, ['return']=true, ['then']=true, ['true']=true, ['until']=true, ['while']=true +} +local function dump_key_shallow(o) + return type(o) == 'string' and o:match'^[%a_][%w_]*$' and not iskeyword_[o] and o + or "[" .. dump_shallow(o) .. "]" +end + +-- Finds index i such that t[i] == e, else returns nil +-- CATEGORY: utility function, tables +local function tfind(t, e) + for i=1,#t do + if t[i] == e then return i end + end + return nil +end + + +-- Gets array of identifier names in prefix expression preceeding pos0. +-- Attempts even if AST is not up-to-date. +-- warning: very rough, only recognizes simplest cases. 
A better solution is +-- probably to have the parser return an incomplete AST on failure and use that. +-- CATEGORY: helper, SciTE buffer +local function get_prefixexp(pos0) + local ids = {} + repeat + local fpos0 = editor:WordStartPosition(pos0, true) + local word = editor:textrange(fpos0,pos0) + table.insert(ids, 1, word) + local c = editor:textrange(fpos0-1, fpos0) + pos0 = fpos0-1 + until c ~= '.' and c ~= ':' + return ids +end + + +-- Command to autocomplete current variable or function arguments. +-- CATEGORY: SciTE command and (dual use) helper +function M.autocomplete_variable(_, minchars) + local lpos0 = editor.CurrentPos + local c = editor:textrange(lpos0-1, lpos0) + if c == '(' then -- function arguments + local ids = get_prefixexp(lpos0-1) + if ids[1] ~= '' then + local scope = LI.get_scope(lpos0-1, buffer.ast, buffer.tokenlist) + local o, err = LI.resolve_prefixexp(ids, scope, buffer.ast.valueglobals, _G) + if not err then + local sig = LI.get_signature_of_value(o) + if sig then + editor:CallTipShow(lpos0, sig) + end + end + end + else -- variable + local fpos0 = editor:WordStartPosition(lpos0, true) + if lpos0 - fpos0 >= (minchars or 0) then + local ids = get_prefixexp(editor.CurrentPos) + table.remove(ids) + local names = LI.names_in_prefixexp(ids, lpos0, buffer.ast, buffer.tokenlist) + for i,name in ipairs(names) do names[i] = dump_key_shallow(name) end + --IMPROVE: remove '.' if key must uses square brackets on indexing. + --IMPROVE: For method calls ':', square bracket key isn't support in Lua, so prevent that. + table.sort(names, function(a,b) return a:upper() < b:upper() end) + if #names > 0 then -- display + mycshow(names, lpos0-fpos0) + end + end + end +end + + +-- CATEGORY: SciTE event handler +function M.OnChar(c) + -- FIX: how do we make this event only occur for Lua buffers? + -- Hack below probably won't work with multiple Lua-based lexers. + if editor.Lexer ~= 0 then return end + + -- Auto-complete variable names. 
+ -- note: test ./: not effective + if AUTOCOMPLETE_VARS and + buffer.ast and (not editor:AutoCActive() or c == '.' or c == ':' or c == '(') + then + M.autocomplete_variable(nil, 1) + end + + -- Ignore character typed over autocompleted text. + -- Q: is this the best way to ignore/delete current char? + if AUTOCOMPLETE_SYNTAX and editor:IndicatorValueAt(INDICATOR_AUTOCOMPLETE, editor.CurrentPos) == 1 then + if editor.CharAt[editor.CurrentPos] == editor.CharAt[editor.CurrentPos-1] then + editor.TargetStart = editor.CurrentPos + editor.TargetEnd = editor.CurrentPos+1 + editor:ReplaceTarget("") + else + -- chars typed should not be have autocomplete indicators on them. + editor.IndicatorCurrent = INDICATOR_AUTOCOMPLETE + editor:IndicatorClearRange(editor.CurrentPos-1,1) + end + end +end + + +-- key codes +local KEY_UP, KEY_DOWN, KEY_LEFT, KEY_RIGHT, KEY_ENTER +if scite_GetProp('PLAT_GTK') then + KEY_UP = 65365 + KEY_DOWN = 65364 + KEY_LEFT = 65361 + KEY_RIGHT = 65363 + KEY_ENTER = 65293 +else -- Windows + KEY_UP = 38 + KEY_DOWN = 40 + KEY_LEFT = 37 + KEY_RIGHT = 39 + KEY_ENTER = 13 +end + + +-- CATEGORY: SciTE event handler +function M.OnKey(key) + -- Adjusting styling delays due to user typing. + if key == KEY_UP or key == KEY_DOWN or + key == KEY_LEFT or key == KEY_RIGHT or key == KEY_ENTER + then -- trigger on line/cursor change + style_delay_count = 0 + else -- delay for all other user typing + style_delay_count = UPDATE_DELAY + end + --print('DEBUG:key', key) +end + + +-- CATEGORY: SciTE event handler +function M.OnOpen() + -- Trigger styling immediately on new file open + -- Note: only happens in current buffer; therefore, also do this in OnSwitchFile. + style_delay_count = 0 +end + + +-- CATEGORY: SciTE event handler +function M.OnBeforeSave() + -- Trigger styling immediately before save. 
+ style_delay_count = 0 +end + + +-- CATEGORY: SciTE event handler +function M.OnSwitchFile() + -- Trigger styling immediately on switch buffer so that styling immediately displays. + style_delay_count = 0 +end + + +-- Command for replacing all occurances of selected variable (if any) with given text `newname` +-- Usage in SciTE properties file: +-- CATEGORY: SciTE command +function M.rename_selected_variable(newname) + local selectedtoken = getselectedvariable() + + if selectedtoken and selectedtoken.ast then + local id = selectedtoken.ast.id + editor:BeginUndoAction() + local lasttoken + for i=#buffer.tokenlist,1,-1 do + local token = buffer.tokenlist[i] + local ast = token.ast + if ast and ast.id == id then + editor:SetSel(token.fpos-1, token.lpos) + editor:ReplaceSel(newname) + lasttoken = token + end + end + if lasttoken then + editor:SetSel(lasttoken.fpos-1, lasttoken.fpos + newname:len()) + editor.Anchor = lasttoken.fpos-1 + end + editor:EndUndoAction() + end +end +-- IMPROVE: prevent rename to conflicting existing variable. + + +-- Jumps to 0-indexed line in file path. +-- Preferrably jump to exact position if given, else 0-indexed line. +-- CATEGORY: SciTE helper, navigation +local function goto_file_line_pos(path, line0, pos0) + scite.Open(path) + if pos0 then + editor:GotoPos(pos0) + else + editor:GotoLine(line0) + end +end + + +-- Command for going to definition of selected variable. +-- TODO: currently only works for locals in the same file. 
+-- CATEGORY: SciTE command +function M.goto_definition() + local selectedtoken = getselectedvariable() + if selectedtoken then + local fpos, fline, path = LI.ast_to_definition_position(selectedtoken.ast, buffer.tokenlist) + if not fline and fpos then + fline = editor:LineFromPosition(fpos-1)+1 + end + if fline then + if set_mark then set_mark() end -- if ctagsdx.lua available + goto_file_line_pos(path, fline and fline-1, fpos and fpos-1) + end + end +end + + +local inspect_queued + +-- Displays value in drop-down list for user inspection of contents. +-- User can navigate in and out of tables, in a stack-like manner. +-- CATEGORY: GUI inspection helper +local function inspect_value(o, prevmenu) + if type(o) == 'table' and (T.istabletype[o] or not T.istype[o]) then + local data = {} + local ok, err = pcall(function() + for k,v in pairs(o) do + local ks = dump_key_shallow(k); if ks:len() > 50 then ks = ks:sub(1,50)..'...' end + local vs = dump_shallow(v); if vs:len() > 50 then vs = vs:sub(1,50)..'...' end + data[#data+1] = {ks .. "=" .. vs, v} + end + end) + local list = {} + if ok then + table.sort(data, function(a,b) return a[1]:upper() < b[1]:upper() end) + -- note: data must be sorted this way under editor.AutoCIgnoreCase==true; + -- otherwise, AutoCSelect will not work properly. + for i=1,#data do list[i] = data[i][1] end + else + data = {} + list[#list+1] = '\tError: Could not read table: ' .. tostring(err) + end + table.insert(list, 1, "\t{" .. (prevmenu and ' (navigate back)' or '')) + table.insert(list, "}") + -- note: \t ensure list is remains sorted. + local selectidx + local function menu() + editor.AutoCIgnoreCase = true + scite_UserListShow(list, 1, function(text) + selectidx = tfind(list, text) + if selectidx then + if text:match'^[%[%"%a_]' then + local val = data[selectidx-1][2] + if type(val) == 'table' then + -- This doesn't work. scite:UserListShow from inside OnUserListSelection + -- has no effect. Q:Why? 
+ --inspect_value(val) + -- workaround: + inspect_queued = function() inspect_value(val, menu) end + scite_MenuCommand('Inspect table contents') + end + else -- go back + if prevmenu then + inspect_queued = prevmenu + scite_MenuCommand('Inspect table contents') + end + end + end + end) + if selectidx then editor.AutoCAutoHide=false; editor:AutoCSelect(list[selectidx]) end + end + menu() + else + scite_UserListShow({dump_shallow(o)}) + end +end + + +-- Command for inspecting fields of selected table variable. +-- CATEGORY: SciTE command +function M.inspect_variable_contents() + if inspect_queued then + local f = inspect_queued; inspect_queued = nil; f() + return + end + local token = getselectedvariable() + if not token or not token.ast then return end + local ast = token.ast + + local iast = ast.seevalue or ast + + if T.istype[iast.value] and not T.istabletype[iast.value] then + scite_UserListShow({"value " .. tostring(iast.value)}) + else + inspect_value(iast.value) + end + -- unfortunately, userdata is not inspectable without 5.2 __pairs. +end + +-- Command to show all uses of selected variable +-- CATEGORY: SciTE command +function M.show_all_variable_uses() + local stoken = getselectedvariable() + if not stoken or not stoken.ast then return end + + local pos0of = {} + + editor.AutoCSeparator = 1 + local infos = {} + for _,token in ipairs(buffer.tokenlist) do + if token.ast and token.ast.id == stoken.ast.id then + local pos0 = token.fpos-1 + local linenum0 = editor:LineFromPosition(pos0) + local linenum1 = linenum0 + 1 + if not pos0of[linenum1] then + pos0of[linenum1] = pos0 + infos[#infos+1] = linenum1 .. ": " .. 
editor:GetLine(linenum0):gsub("[\r\n]+$", "") + end + end + end + --editor:UserListShow(1, table.concat(infos, "\1")) + scite_UserListShow(infos, 1, function(text) + local linenum1 = tonumber(text:match("^%d+")) + if set_mark then set_mark() end -- if ctagsdx.lua available + editor:GotoPos(pos0of[linenum1]) + end) +end + + +-- Command for forcing redoing of inspection. Note: reloads modules imported via require. +-- CATEGORY: SciTE command +function M.force_reinspect() + if buffer.ast then + LI.uninspect(buffer.ast) + LI.clear_cache() + collectgarbage() -- note package.loaded was given weak keys. + LI.inspect(buffer.ast, buffer.tokenlist, buffer.src, report) + end +end +--IMPROVE? possibly should reparse AST as well in case AST got corrupted. + + +-- Command to list erorrs and warnings. +-- CATEGORY: SciTE command +function M.list_warnings() + if not buffer.ast then return end + + local warnings = LI.list_warnings(buffer.tokenlist, buffer.src) + + if #warnings > 0 then + for i,err in ipairs(warnings) do + print(err) + end + print("To loop through warnings, press F4.") + --scite_UserListShow(errors) + end +end + + +-- Command to select smallest statement (or comment) containing selection. +-- Executing multiple times selects larger statements containing current statement. +-- CATEGORY: SciTE command +function M.select_statementblockcomment() + if buffer.src ~= editor:GetText() then return end -- skip if AST not up-to-date + + -- Get selected position range. + -- caution: SciTE appears to have an odd behavior where if SetSel + -- is performed with CurrentPos at the start of a new line, + -- then Anchor and CurrentPos get reversed. Similar behavior is observed + -- when holding down the shift key and pressing the right arrow key + -- until the cursor advances to the next line. + -- In any case, we want to handle reversed ranges. 
+ local fpos, lpos = editor.Anchor, editor.CurrentPos + if lpos < fpos then fpos, lpos = lpos, fpos end -- swap + fpos, lpos = fpos + 1, lpos + 1 - 1 + local fpos, lpos = LA.select_statementblockcomment(buffer.ast, buffer.tokenlist, fpos, lpos, true) + editor:SetSel(fpos-1, lpos-1 + 1) +end + + +-- Command to jump to beginning or end of previous statement (whichever is closer). +-- CATEGORY: SciTE command +function M.goto_previous_statement() + local pos1 = editor.CurrentPos+1 + if pos1 == 1 then return end + pos1 = pos1 - 1 -- ensures repeated calls advance back + local mast, isafter = LA.current_statementblock(buffer.ast, buffer.tokenlist, pos1) + local fpos, lpos = LA.ast_pos_range(mast, buffer.tokenlist) + if (editor.CurrentPos+1) > lpos + 1 then + editor:GotoPos(lpos+1-1) + else + editor:GotoPos(fpos-1) + end +end + +-- Lua module searcher function that attemps to retrieve module from +-- same file path as current file. +-- CATEGORY: SciTE + file loading +local function mysearcher(name) + local tries = "" + local dir = props.FileDir + repeat + for i=1,2 do + local path = dir .. '/' .. name:gsub("%.", "/") .. + (i==1 and ".lua" or "/init.lua") + --DEBUG(path) + local f, err = loadfile(path) + if f then return f end + tries = tries .. "\tno file " .. path .. "\n" + end + dir = dir:gsub("[\\/]?[^\\/]+$", "") + until dir == '' + return tries +end + + +-- Installs properties and other global changes during startup. +-- This function should be called via something like +-- +-- local LUAINSPECT_PATH = "c:/lua-inspect" +-- package.path = package.path .. ";" .. LUAINSPECT_PATH .. "/metalualib/?.lua" +-- package.path = package.path .. ";" .. LUAINSPECT_PATH .. "/lib/?.lua" +-- require "luainspect.scite".install() +-- +-- from the SciTE Lua startup script, i.e. the file identified in the +-- `ext.lua.startup.script` property. +-- If the Lua startup script is ExtMan, you may optionally instead call +-- this from an ExtMan script (i.e. 
Lua file inside the ExtMan "scite_lua" folder. +-- This function does not work correctly if called from a Lua extension script, +-- i.e. the file identified in the `extension.*.lua` property, because by the +-- time the extension script has been loaded SciTE has already applied +-- styles from the properties so customizations here will be ignored until a +-- buffer switch. +-- +-- CATEGORY: initialization +function M.install() + + -- apply styles if not overridden in properties file. + + if props['extension.*.lua'] == '' then + local thisfilepath = assert(assert(debug.getinfo(1).source):gsub('^@', '')) + print(thisfilepath) + props['extension.*.lua'] = thisfilepath + -- Q: is there a cleaner way? + end + + local light_styles = [[ +# This can be customized in your properties file. +lexer.*.lua=script_lua +style.script_lua.default=fore:#000000 +style.script_lua.local=fore:#000080 +style.script_lua.local_mutate=fore:#000080,italics +style.script_lua.local_unused=fore:#ffffff,back:#000080 +style.script_lua.local_param=fore:#000040 +style.script_lua.local_param_mutate=fore:#000040,italics +style.script_lua.upvalue=fore:#0000ff +style.script_lua.upvalue_mutate=fore:#0000ff,italics +style.script_lua.global_recognized=fore:#600000 +style.script_lua.global_unrecognized=fore:#ffffff,back:#ff0000,bold +style.script_lua.field_recognized=fore:#600000 +style.script_lua.field=fore:#c00000 +style.script_lua.comment=fore:#008000 +style.script_lua.string=fore:#00c000 +style.script_lua.tab=back:#f0f0f0 +style.script_lua.keyword=fore:#505050,bold +style.script_lua.compiler_error=fore:#800000,back:#ffffc0 + +# From SciTE docs: +# As well as the styles generated by the lexer, there are other numbered styles used. +# Style 32 is the default style and its features will be inherited by all other styles unless overridden. +# Style 33 is used to display line numbers in the margin. +# Styles 34 and 35 are used to display matching and non-matching braces respectively. 
+# Style 36 is used for displaying control characters. This is not a full style as the foreground and background colours for control characters are determined by their lexical state rather than this style. +# Style 37 is used for displaying indentation guides. Only the fore and back are used. +# A * can be used instead of a lexer to indicate a global style setting. +#style.script_lua.32=back:#000000 +#style.script_lua.33= +#style.script_lua.33= +#style.script_lua.34= +#style.script_lua.36= +#style.script_lua.37= + +# warning: these changes are global for all file types: +caret.line.back=#ffff00 +caret.line.back.alpha=20 +]] + + -- or dark background style + local dark_styles = [[ +lexer.*.lua=script_lua +style.script_lua.32=back:#000000 +style.script_lua.default=fore:#ffffff +style.script_lua.local=fore:#c0c0ff +style.script_lua.local_mutate=fore:#c0c0ff,italics +style.script_lua.local_unused=fore:#ffffff,back:#000080 +style.script_lua.local_param=fore:#8080ff +style.script_lua.local_param_mutate=fore:#8080ff,italics +style.script_lua.upvalue=fore:#e8e8ff +style.script_lua.upvalue_mutate=fore:#e8e8ff,italics +style.script_lua.global_recognized=fore:#ffc080 +style.script_lua.global_unrecognized=fore:#ffffff,back:#ff0000,bold +style.script_lua.field_recognized=fore:#ffc080 +style.script_lua.field=fore:#ff0000 +style.script_lua.comment=fore:#009000 +style.script_lua.string=fore:#80c080 +style.script_lua.tab=back:#303030 +style.script_lua.keyword=fore:#a0a080,bold +style.script_lua.compiler_error=fore:#800000,back:#ffffc0 +style.script_lua.indic_style=6 +style.script_lua.indic_fore=#808080 +# warning: these changes are global for all file types. 
Avoid #ffffff in case those +# are light styles +style.script_lua.caret.fore=#c0c0c0 +style.script_lua.caret.line.back=#ffff00 +style.script_lua.caret.line.back.alpha=20 +style.script_lua.selection.alpha=128 +style.script_lua.selection.back=#808080 +]] + + local styles = (props['style.script_lua.scheme'] == 'dark') and dark_styles or light_styles + + for style in styles:gmatch("[^\n]+") do + if not (style:match("^%s*#") or style:match("^%s*$")) then + local name, value = style:match("^([^=]+)=(.*)"); assert(name, style) + local realname =string.gsub(name, '^(style%.script_lua%.)(.+)$', function(first, last) + return STYLES[last] and first .. STYLES[last] or + last:match'^%d+$' and name or last + end) -- convert to real style name + if props[name] ~= '' then value = props[name] end -- override by user + --DEBUG(realname .. '=' .. value) + props[realname] = value + end + end + -- DESIGN:SciTE: The above technique does not work ideally. A property like `selection.back` + -- may be pre-defined by SciTE, and then we'd want this script to override that default, and + -- finally we'd want to allow the user to override that in property files. However, this script + -- is run after property files are applied and doesn't know whether a property + -- has been re-defined in a property file unless the property was left blank by SciTE and the + -- user property file changed it to a non-blank value. This is the reason why the above + -- dark_styles uses style.script_lua.selection.back (which is undefined by SciTE) rather + -- than selection.back (which SciTE may predefine to a non-blank value). It would be + -- preferrable if SciTE would allow this script to define default properties before properties + -- are read from property files. 
+ + scite_Command("Rename all instances of selected variable|*luainspect_rename_selected_variable $(1)|*.lua|Ctrl+Alt+R") + scite_Command("Go to definition of selected variable|luainspect_goto_definition|*.lua|Ctrl+Alt+D") + scite_Command("Show all variable uses|luainspect_show_all_variable_uses|*.lua|Ctrl+Alt+U") + scite_Command("Inspect table contents|luainspect_inspect_variable_contents|*.lua|Ctrl+Alt+B") + scite_Command("Select current statement, block or comment|luainspect_select_statementblockcomment|*.lua|Ctrl+Alt+S") + scite_Command("Force full reinspection of all code|luainspect_force_reinspect|*.lua|Ctrl+Alt+Z") + scite_Command("Goto previous statement|luainspect_goto_previous_statement|*.lua|Ctrl+Alt+Up") + scite_Command("Autocomplete variable|luainspect_autocomplete_variable|*.lua|Ctrl+Alt+C") + scite_Command("List all errors/warnings|luainspect_list_warnings|*.lua|Ctrl+Alt+E") + --FIX: user.context.menu=Rename all instances of selected variable|1102 or props['user.contextmenu'] + _G.luainspect_rename_selected_variable = M.rename_selected_variable + _G.luainspect_goto_definition = M.goto_definition + _G.luainspect_inspect_variable_contents = M.inspect_variable_contents + _G.luainspect_show_all_variable_uses = M.show_all_variable_uses + _G.luainspect_select_statementblockcomment = M.select_statementblockcomment + _G.luainspect_force_reinspect = M.force_reinspect + _G.luainspect_goto_previous_statement = M.goto_previous_statement + _G.luainspect_autocomplete_variable = M.autocomplete_variable + _G.luainspect_list_warnings = M.list_warnings + + + -- Allow finding modules. + table.insert(package.loaders, mysearcher) + if PATH_APPEND ~= '' then + package.path = package.path .. ';' .. PATH_APPEND + end + if CPATH_APPEND ~= '' then + package.cpath = package.cpath .. ';' .. CPATH_APPEND + end + + -- Make package.loaded have weak values. This makes modules more readilly get unloaded, + -- such as when doing force_reinspect. + -- WARNING: Global change to Lua. 
+ local oldmt = getmetatable(package.loaded) + local mt = oldmt or {} + if not mt.__mode then mt.__mode = 'v' end + if not oldmt then setmetatable(package.loaded, mt) end + + _G.luainspect_installed = true +end + + +-- Installs a SciTE event handler locally for the current buffer. +-- If an existing global handler exists (this includes ExtMan handlers), +-- ensure that is still called also. +-- CATEGORY: initialization. +local function install_handler(name) + local local_handler = M[name] + local global_handler = _G[name] + _G[name] = function(...) + local_handler(...) + if global_handler then global_handler(...) end + end +end + + +-- Installs extension interface. +-- This function should be called via +-- +-- require "luainspect.scite".install_extension() +-- +-- from your Lua extension script +-- (the file identified in your `extension.*.lua` property) or by +-- setting your `extension.*.lua` property to this file +-- (NOTE: the `install` function automatically does +-- this for you). Do not call this from your SciTE Lua startup script +-- (the file identified in your `ext.lua.startup.script` property) because +-- that would activate these events for non-Lua files as well. +-- +-- CATEGORY: initialization +function M.install_extension() + if not _G.luainspect_installed then + error([[ +ERROR: Please add `require "luainspect.scite".setup_install()` (but +without ``) to your SciTE Lua startup script (i.e. the file identified in your +`ext.lua.startup.script` property (i.e. ]] .. props['ext.lua.startup.script'] ..').', 0) + end + + -- Install event handlers for this buffer. + install_handler'OnStyle' + install_handler'OnUpdateUI' + install_handler'OnDoubleClick' + if AUTOCOMPLETE_VARS or AUTOCOMPLETE_SYNTAX then + install_handler'OnChar' + end + install_handler'OnKey' + install_handler'OnOpen' + install_handler'OnBeforeSave' + install_handler'OnSwitchFile' + + -- Define markers and indicators. + editor:MarkerDefine(MARKER_ERRORLINE, SC_MARK_CHARACTER+33) -- '!' 
+ editor:MarkerSetFore(MARKER_ERRORLINE, 0xffffff) + editor:MarkerSetBack(MARKER_ERRORLINE, 0x0000ff) + editor:MarkerDefine(MARKER_ERROR, SC_MARK_FULLRECT) + editor:MarkerSetBack(MARKER_ERROR, 0x000080) + editor:MarkerSetAlpha(MARKER_ERROR, 10) + editor:MarkerDefine(MARKER_SCOPEBEGIN, SC_MARK_TCORNERCURVE) + editor:MarkerDefine(MARKER_SCOPEMIDDLE, SC_MARK_VLINE) + editor:MarkerDefine(MARKER_SCOPEEND, SC_MARK_LCORNERCURVE) + editor:MarkerSetFore(MARKER_SCOPEBEGIN, 0x0000ff) + editor:MarkerSetFore(MARKER_SCOPEMIDDLE, 0x0000ff) + editor:MarkerSetFore(MARKER_SCOPEEND, 0x0000ff) + editor:MarkerDefine(MARKER_MASKED, SC_MARK_CHARACTER+77) -- 'M' + editor:MarkerSetFore(MARKER_MASKED, 0xffffff) + editor:MarkerSetBack(MARKER_MASKED, 0x000080) + editor:MarkerDefine(MARKER_MASKING, SC_MARK_CHARACTER+77) -- 'M' + editor:MarkerSetFore(MARKER_MASKING, 0xffffff) + editor:MarkerSetBack(MARKER_MASKING, 0x0000ff) + editor:MarkerDefine(MARKER_WAIT, SC_MARK_CHARACTER+43) -- '+' + editor:MarkerSetFore(MARKER_WAIT, 0xffffff) + editor:MarkerSetBack(MARKER_WAIT, 0xff0000) + editor.IndicStyle[INDICATOR_AUTOCOMPLETE] = INDIC_BOX + editor.IndicFore[INDICATOR_AUTOCOMPLETE] = 0xff0000 + local indic_style = props["style.script_lua.indic_style"] + local indic_fore = props["style.script_lua.indic_fore"] + editor.IndicStyle[INDICATOR_SCOPE] = + indic_style == '' and INDIC_ROUNDBOX or indic_style + editor.IndicStyle[INDICATOR_KEYWORD] = INDIC_PLAIN + if indic_fore ~= '' then + local color = tonumber(indic_fore:sub(2), 16) + editor.IndicFore[INDICATOR_SCOPE] = color + editor.IndicFore[INDICATOR_KEYWORD] = color + end + editor.IndicStyle[INDICATOR_MASKED] = INDIC_STRIKE + editor.IndicFore[INDICATOR_MASKED] = 0x0000ff + editor.IndicStyle[INDICATOR_MASKING] = INDIC_SQUIGGLE + editor.IndicFore[INDICATOR_MASKING] = 0x0000ff + editor.IndicStyle[INDICATOR_WARNING] = INDIC_SQUIGGLE -- IMPROVE: combine with above? 
+ editor.IndicFore[INDICATOR_WARNING] = 0x008080 + editor.IndicStyle[INDICATOR_DEADCODE] = INDIC_ROUNDBOX + editor.IndicFore[INDICATOR_DEADCODE] = 0x808080 + editor.IndicAlpha[INDICATOR_DEADCODE] = 0x80 + -- editor.IndicStyle[INDICATOR_INVALIDATED] = INDIC_SQUIGGLE + -- editor.IndicFore[INDICATOR_INVALIDATED] = 0x0000ff + + +end + + +-- If this module was not loaded via require, then assume it is being loaded +-- as a SciTE Lua extension script, i.e. `extension.*.lua` property. +if ... == nil then + M.install_extension() +end + + +-- COMMENT:SciTE: when Lua code fails, SciTE by default doesn't display a +-- full stack traceback (debug.traceback) to assist debugging. +-- Presumably the undocumented ext.lua.debug.traceback=1 enables this, +-- but it works oddly, installing `print` rather than `debug.traceback` as +-- the error handling function. Although one can set print to debug.traceback, +-- that breaks print. + + +return M diff --git a/builders/lua-inspect/lib/luainspect/signatures.lua b/builders/lua-inspect/lib/luainspect/signatures.lua new file mode 100644 index 000000000..145ed3fa9 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/signatures.lua @@ -0,0 +1,433 @@ +local M = {} + +local T = require "luainspect.types" + +-- signatures of known globals +M.global_signatures = { + assert = "assert (v [, message])", + collectgarbage = "collectgarbage (opt [, arg])", + dofile = "dofile (filename)", + error = "error (message [, level])", + _G = "(table)", + getfenv = "getfenv ([f])", + getmetatable = "getmetatable (object)", + ipairs = "ipairs (t)", + load = "load (func [, chunkname])", + loadfile = "loadfile ([filename])", + loadstring = "loadstring (string [, chunkname])", + next = "next (table [, index])", + pairs = "pairs (t)", + pcall = "pcall (f, arg1, ...)", + print = "print (...)", + rawequal = "rawequal (v1, v2)", + rawget = "rawget (table, index)", + rawset = "rawset (table, index, value)", + select = "select (index, ...)", + setfenv = "setfenv (f, 
table)", + setmetatable = "setmetatable (table, metatable)", + tonumber = "tonumber (e [, base])", + tostring = "tostring (e)", + type = "type (v)", + unpack = "unpack (list [, i [, j]])", + _VERSION = "(string)", + xpcall = "xpcall (f, err)", + module = "module (name [, ...])", + require = "require (modname)", + coroutine = "(table) coroutine manipulation library", + debug = "(table) debug facilities library", + io = "(table) I/O library", + math = "(table) math functions libary", + os = "(table) OS facilities library", + package = "(table) package library", + string = "(table) string manipulation library", + table = "(table) table manipulation library", + ["coroutine.create"] = "coroutine.create (f)", + ["coroutine.resume"] = "coroutine.resume (co [, val1, ...])", + ["coroutine.running"] = "coroutine.running ()", + ["coroutine.status"] = "coroutine.status (co)", + ["coroutine.wrap"] = "coroutine.wrap (f)", + ["coroutine.yield"] = "coroutine.yield (...)", + ["debug.debug"] = "debug.debug ()", + ["debug.getfenv"] = "debug.getfenv (o)", + ["debug.gethook"] = "debug.gethook ([thread])", + ["debug.getinfo"] = "debug.getinfo ([thread,] function [, what])", + ["debug.getlocal"] = "debug.getlocal ([thread,] level, local)", + ["debug.getmetatable"] = "debug.getmetatable (object)", + ["debug.getregistry"] = "debug.getregistry ()", + ["debug.getupvalue"] = "debug.getupvalue (func, up)", + ["debug.setfenv"] = "debug.setfenv (object, table)", + ["debug.sethook"] = "debug.sethook ([thread,] hook, mask [, count])", + ["debug.setlocal"] = "debug.setlocal ([thread,] level, local, value)", + ["debug.setmetatable"] = "debug.setmetatable (object, table)", + ["debug.setupvalue"] = "debug.setupvalue (func, up, value)", + ["debug.traceback"] = "debug.traceback ([thread,] [message] [, level])", + ["io.close"] = "io.close ([file])", + ["io.flush"] = "io.flush ()", + ["io.input"] = "io.input ([file])", + ["io.lines"] = "io.lines ([filename])", + ["io.open"] = "io.open (filename [, 
mode])", + ["io.output"] = "io.output ([file])", + ["io.popen"] = "io.popen (prog [, mode])", + ["io.read"] = "io.read (...)", + ["io.tmpfile"] = "io.tmpfile ()", + ["io.type"] = "io.type (obj)", + ["io.write"] = "io.write (...)", + ["math.abs"] = "math.abs (x)", + ["math.acos"] = "math.acos (x)", + ["math.asin"] = "math.asin (x)", + ["math.atan"] = "math.atan (x)", + ["math.atan2"] = "math.atan2 (y, x)", + ["math.ceil"] = "math.ceil (x)", + ["math.cos"] = "math.cos (x)", + ["math.cosh"] = "math.cosh (x)", + ["math.deg"] = "math.deg (x)", + ["math.exp"] = "math.exp (x)", + ["math.floor"] = "math.floor (x)", + ["math.fmod"] = "math.fmod (x, y)", + ["math.frexp"] = "math.frexp (x)", + ["math.huge"] = "math.huge", + ["math.ldexp"] = "math.ldexp (m, e)", + ["math.log"] = "math.log (x)", + ["math.log10"] = "math.log10 (x)", + ["math.max"] = "math.max (x, ...)", + ["math.min"] = "math.min (x, ...)", + ["math.modf"] = "math.modf (x)", + ["math.pi"] = "math.pi", + ["math.pow"] = "math.pow (x, y)", + ["math.rad"] = "math.rad (x)", + ["math.random"] = "math.random ([m [, n]])", + ["math.randomseed"] = "math.randomseed (x)", + ["math.sin"] = "math.sin (x)", + ["math.sinh"] = "math.sinh (x)", + ["math.sqrt"] = "math.sqrt (x)", + ["math.tan"] = "math.tan (x)", + ["math.tanh"] = "math.tanh (x)", + ["os.clock"] = "os.clock ()", + ["os.date"] = "os.date ([format [, time]])", + ["os.difftime"] = "os.difftime (t2, t1)", + ["os.execute"] = "os.execute ([command])", + ["os.exit"] = "os.exit ([code])", + ["os.getenv"] = "os.getenv (varname)", + ["os.remove"] = "os.remove (filename)", + ["os.rename"] = "os.rename (oldname, newname)", + ["os.setlocale"] = "os.setlocale (locale [, category])", + ["os.time"] = "os.time ([table])", + ["os.tmpname"] = "os.tmpname ()", + ["package.cpath"] = "package.cpath", + ["package.loaded"] = "package.loaded", + ["package.loaders"] = "package.loaders", + ["package.loadlib"] = "package.loadlib (libname, funcname)", + ["package.path"] = "package.path", + 
["package.preload"] = "package.preload", + ["package.seeall"] = "package.seeall (module)", + ["string.byte"] = "string.byte (s [, i [, j]])", + ["string.char"] = "string.char (...)", + ["string.dump"] = "string.dump (function)", + ["string.find"] = "string.find (s, pattern [, init [, plain]])", + ["string.format"] = "string.format (formatstring, ...)", + ["string.gmatch"] = "string.gmatch (s, pattern)", + ["string.gsub"] = "string.gsub (s, pattern, repl [, n])", + ["string.len"] = "string.len (s)", + ["string.lower"] = "string.lower (s)", + ["string.match"] = "string.match (s, pattern [, init])", + ["string.rep"] = "string.rep (s, n)", + ["string.reverse"] = "string.reverse (s)", + ["string.sub"] = "string.sub (s, i [, j])", + ["string.upper"] = "string.upper (s)", + ["table.concat"] = "table.concat (table [, sep [, i [, j]]])", + ["table.insert"] = "table.insert (table, [pos,] value)", + ["table.maxn"] = "table.maxn (table)", + ["table.remove"] = "table.remove (table [, pos])", + ["table.sort"] = "table.sort (table [, comp])", +} + +-- utility function. Converts e.g. name 'math.sqrt' to its value. +local function resolve_global_helper_(name) + local o = _G + for fieldname in name:gmatch'[^%.]+' do o = o[fieldname] end + return o +end +local function resolve_global(name) + local a, b = pcall(resolve_global_helper_, name) + if a then return b else return nil, b end +end + +-- Same as global_signatures but maps value (not name) to signature. +M.value_signatures = {} +local isobject = {['function']=true, ['table']=true, ['userdata']=true, ['coroutine']=true} +for name,sig in pairs(M.global_signatures) do + local val, err = resolve_global(name) + if isobject[type(val)] then + M.value_signatures[val] = sig + end +end + +-- min,max argument counts. 
+M.argument_counts = { + [assert] = {1,2}, + [collectgarbage] = {1,2}, + [dofile] = {1}, + [error] = {1,2}, + [getfenv or false] = {0,1}, + [getmetatable] = {1,1}, + [ipairs] = {1,1}, + [load] = {1,2}, + [loadfile] = {0,1}, + [loadstring] = {1,2}, + [next] = {1,2}, + [pairs] = {1,1}, + [pcall] = {1,math.huge}, + [print] = {0,math.huge}, + [rawequal] = {2,2}, + [rawget] = {2,2}, + [rawset] = {3,3}, + [select] = {1, math.huge}, + [setfenv or false] = {2,2}, + [setmetatable] = {2,2}, + [tonumber] = {1,2}, + [tostring] = {1}, + [type] = {1}, + [unpack] = {1,3}, + [xpcall] = {2,2}, + [module] = {1,math.huge}, + [require] = {1,1}, + [coroutine.create] = {1,1}, + [coroutine.resume] = {1, math.huge}, + [coroutine.running] = {0,0}, + [coroutine.status] = {1,1}, + [coroutine.wrap] = {1,1}, + [coroutine.yield] = {0,math.huge}, + [debug.debug] = {0,0}, + [debug.getfenv or false] = {1,1}, + [debug.gethook] = {0,1}, + [debug.getinfo] = {1,3}, + [debug.getlocal] = {2,3}, + [debug.getmetatable] = {1,1}, + [debug.getregistry] = {0,0}, + [debug.getupvalue] = {2,2}, + [debug.setfenv or false] = {2,2}, + [debug.sethook] = {2,4}, + [debug.setlocal] = {3,4}, + [debug.setmetatable] = {2,2}, + [debug.setupvalue] = {3,3}, + [debug.traceback] = {0,3}, + [io.close] = {0,1}, + [io.flush] = {0,0}, + [io.input] = {0,1}, + [io.lines] = {0,1}, + [io.open] = {1,2}, + [io.output] = {0,1}, + [io.popen] = {1,2}, + [io.read] = {0,math.huge}, + [io.tmpfile] = {0}, + [io.type] = {1}, + [io.write] = {0,math.huge}, + [math.abs] = {1}, + [math.acos] = {1}, + [math.asin] = {1}, + [math.atan] = {1}, + [math.atan2] = {2,2}, + [math.ceil] = {1,1}, + [math.cos] = {1,1}, + [math.cosh] = {1,1}, + [math.deg] = {1,1}, + [math.exp] = {1,1}, + [math.floor] = {1,1}, + [math.fmod] = {2,2}, + [math.frexp] = {1,1}, + [math.ldexp] = {2,2}, + [math.log] = {1,1}, + [math.log10] = {1,1}, + [math.max] = {1,math.huge}, + [math.min] = {1,math.huge}, + [math.modf] = {1,1}, + [math.pow] = {2,2}, + [math.rad] = {1,1}, + 
[math.random] = {0,2}, + [math.randomseed] = {1,1}, + [math.sin] = {1,1}, + [math.sinh] = {1,1}, + [math.sqrt] = {1,1}, + [math.tan] = {1,1}, + [math.tanh] = {1,1}, + [os.clock] = {0,0}, + [os.date] = {0,2}, + [os.difftime] = {2,2}, + [os.execute] = {0,1}, + [os.exit] = {0,1}, + [os.getenv] = {1,1}, + [os.remove] = {1,1}, + [os.rename] = {2,2}, + [os.setlocale] = {1,2}, + [os.time] = {0,1}, + [os.tmpname] = {0,0}, + [package.loadlib] = {2,2}, + [package.seeall] = {1,1}, + [string.byte] = {1,3}, + [string.char] = {0,math.huge}, + [string.dump] = {1,1}, + [string.find] = {2,4}, + [string.format] = {1,math.huge}, + [string.gmatch] = {2,2}, + [string.gsub] = {3,4}, + [string.len] = {1,1}, + [string.lower] = {1,1}, + [string.match] = {2,3}, + [string.rep] = {2,2}, + [string.reverse] = {1,1}, + [string.sub] = {2,3}, + [string.upper] = {1,1}, + [table.concat] = {1,4}, + [table.insert] = {2,3}, + [table.maxn] = {1,1}, + [table.remove] = {1,2}, + [table.sort] = {1,2}, + [false] = nil -- trick (relies on potentially undefined behavior) +} + + +-- functions with zero or nearly zero side-effects, and with deterministic results, that may be evaluated by the analyzer. 
+M.safe_function = { + [require] = true, + [rawequal] = true, + [rawget] = true, + [require] = true, -- sort of + [select] = true, + [tonumber] = true, + [tostring] = true, + [type] = true, + [unpack] = true, + [coroutine.create] = true, + -- [coroutine.resume] + [coroutine.running] = true, + [coroutine.status] = true, + [coroutine.wrap] = true, + --[coroutine.yield] + -- [debug.debug] + --[debug.getfenv] = true, + [debug.gethook] = true, + [debug.getinfo] = true, + [debug.getlocal] = true, + [debug.getmetatable] = true, + [debug.getregistry] = true, + [debug.getupvalue] = true, + -- [debug.setfenv] + -- [debug.sethook] + -- [debug.setlocal] + -- [debug.setmetatable] + -- [debug.setupvalue] + -- [debug.traceback] = true, + [io.type] = true, + -- skip all other io.* + [math.abs] = true, + [math.acos] = true, + [math.asin] = true, + [math.atan] = true, + [math.atan2] = true, + [math.ceil] = true, + [math.cos] = true, + [math.cosh] = true, + [math.deg] = true, + [math.exp] = true, + [math.floor] = true, + [math.fmod] = true, + [math.frexp] = true, + [math.ldexp] = true, + [math.log] = true, + [math.log10] = true, + [math.max] = true, + [math.min] = true, + [math.modf] = true, + [math.pow] = true, + [math.rad] = true, + --[math.random] + --[math.randomseed] + [math.sin] = true, + [math.sinh] = true, + [math.sqrt] = true, + [math.tan] = true, + [math.tanh] = true, + [os.clock] = true, -- safe but non-deterministic + [os.date] = true,-- safe but non-deterministic + [os.difftime] = true, + --[os.execute] + --[os.exit] + [os.getenv] = true, -- though depends on environment + --[os.remove] + --[os.rename] + --[os.setlocale] + [os.time] = true, -- safe but non-deterministic + --[os.tmpname] + [string.byte] = true, + [string.char] = true, + [string.dump] = true, + [string.find] = true, + [string.format] = true, + [string.gmatch] = true, + [string.gsub] = true, + [string.len] = true, + [string.lower] = true, + [string.match] = true, + [string.rep] = true, + [string.reverse] = 
true, + [string.sub] = true, + [string.upper] = true, + [table.maxn] = true, +} + +M.mock_functions = {} + +-- TODO:IMPROVE +local function mockfunction(func, ...) + local inputs = {n=0} + local outputs = {n=0} + local isoutputs + for i=1,select('#', ...) do + local v = select(i, ...) + if type(v) == 'table' then v = v[1] end + if v == 'N' or v == 'I' then v = T.number end + if v == '->' then + isoutputs = true + elseif isoutputs then + outputs[#outputs+1] = v; outputs.n = outputs.n + 1 + else + inputs[#inputs+1] = v; inputs.n = inputs.n + 1 + end + end + M.mock_functions[func] = {inputs=inputs, outputs=outputs} +end + + +mockfunction(math.abs, 'N', '->', {'N',0,math.huge}) +mockfunction(math.acos, {'N',-1,1}, '->', {'N',0,math.pi/2}) +mockfunction(math.asin, {'N',-1,1}, '->', {'N',-math.pi/2,math.pi/2}) +mockfunction(math.atan, {'N',-math.huge,math.huge}, '->', + {'N',-math.pi/2,math.pi/2}) +--FIX atan2 +mockfunction(math.ceil, 'N','->','I') +mockfunction(math.cos, 'N','->',{'N',-1,1}) +mockfunction(math.cosh, 'N','->',{'N',1,math.huge}) +mockfunction(math.deg, 'N','->','N') +mockfunction(math.exp, 'N','->',{'N',0,math.huge}) +mockfunction(math.floor, 'N','->','I') +mockfunction(math.fmod, 'N','N','->','N') +mockfunction(math.frexp, 'N','->',{'N',-1,1},'->','I') +mockfunction(math.ldexp, {'N','I'},'->','N') +mockfunction(math.log, {'N',0,math.huge},'->','N') +mockfunction(math.log10, {'N',0,math.huge},'->','N') +-- function max(...) print 'NOT IMPL'end +-- function min(...) print 'NOT IMPL'end +mockfunction(math.modf, 'N','->','I',{'N',-1,1}) + +mockfunction(math.pow, 'N','N','->','N') -- improve? +mockfunction(math.rad, 'N','->','N') +-- random = function() print 'NOT IMPL' end +mockfunction(math.randomseed, 'N') +mockfunction(math.sin, 'N','->',{'N',-1,1}) +mockfunction(math.sinh, 'N','->','N') +mockfunction(math.sqrt, {'N',0,math.huge},'->',{'N',0,math.huge}) +mockfunction(math.tan, 'N','->','N') -- improve? 
+mockfunction(math.tanh, 'N','->',{'N',-1,1}) + + +return M diff --git a/builders/lua-inspect/lib/luainspect/typecheck.lua b/builders/lua-inspect/lib/luainspect/typecheck.lua new file mode 100644 index 000000000..940686091 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/typecheck.lua @@ -0,0 +1,40 @@ +-- luainspect.typecheck - Type definitions used to check LuaInspect itself. +-- +-- (c) 2010 David Manura, MIT License. + +local T = require "luainspect.types" + +local ast_mt = {__tostring = function(s) return 'AST' end} + +return function(context) + -- AST type. + local ast = T.table { + tag = T.string, + lineinfo=T.table{first=T.table{comments=T.table{T.table{T.string,T.number,T.number}},T.number,T.number,T.number,T.string}, + ast=T.table{comments=T.table{T.table{T.string,T.number,T.number}},T.number,T.number,T.number,T.string}}, + isfield=T.boolean, tag2=T.string, + value=T.universal, valueself=T.number, valuelist=T.table{n=T.number, isvaluepegged=T.boolean}, + resolvedname=T.string, definedglobal=T.boolean, id=T.number, isparam=T.boolean, isset=T.boolean, isused=T.boolean, + isignore=T.boolean, + functionlevel=T.number, localmasked=T.boolean, note=T.string, nocollect=T.table{}, isdead=T.boolean} + -- FIX: some of these are "boolean or nil" actually + ast.localdefinition=ast; ast.localmasking = ast + ast.previous = ast; ast.parent = ast + ast.seevalue = ast; ast.seenote=ast + setmetatable(ast, ast_mt) + + ast[1] = ast; ast[2] = ast + context.apply_value('ast$', ast) + + -- Token type. + context.apply_value('token$', T.table{ + tag=T.string, fpos=T.number, lpos=T.number, keywordid=T.number, ast=ast, [1]=T.string + }) + + -- Lua source code string type. + context.apply_value('src$', '') + + -- SciTE syler object type. 
+ local nf = function()end + context.apply_value('^styler$', T.table{SetState=nf, More=nf, Current=nf, Forward=nf, StartStyling=nf, EndStyling=nf, language=T.string}) +end diff --git a/builders/lua-inspect/lib/luainspect/types.lua b/builders/lua-inspect/lib/luainspect/types.lua new file mode 100644 index 000000000..cb3e18382 --- /dev/null +++ b/builders/lua-inspect/lib/luainspect/types.lua @@ -0,0 +1,130 @@ +local T = {} -- types + +-- istype[o] iff o represents a type (i.e. set of values) +T.istype = {} + +-- iserror[o] iff o represents an error type (created via T.error). +T.iserror = {} + +-- istabletype[o] iff o represents a table type (created by T.table). +T.istabletype = {} + +-- Number type +T.number = {} +setmetatable(T.number, T.number) +function T.number.__tostring(self) + return 'number' +end +T.istype[T.number] = true + +-- String type +T.string = {} +setmetatable(T.string, T.string) +function T.string.__tostring(self) + return 'string' +end +T.istype[T.string] = true + +-- Boolean type +T.boolean = {} +setmetatable(T.boolean, T.boolean) +function T.boolean.__tostring(self) + return 'boolean' +end +T.istype[T.boolean] = true + +-- Table type +function T.table(t) + T.istype[t] = true + T.istabletype[t] = true + return t +end + +-- Universal type. This is a superset of all other types. +T.universal = {} +setmetatable(T.universal, T.universal) +function T.universal.__tostring(self) + return 'unknown' +end +T.istype[T.universal] = true + +-- nil type. Represents `nil` but can be stored in tables. +T['nil'] = {} +setmetatable(T['nil'], T['nil']) +T['nil'].__tostring = function(self) + return 'nil' +end +T.istype[T['nil']] = true + +-- None type. Represents a non-existent value, in a similar way +-- that `none` is used differently from `nil` in the Lua C API. 
+T.none = {} +setmetatable(T.none, T.none) +function T.none.__tostring(self) + return 'none' +end +T.istype[T.none] = true + +-- Error type +local CError = {}; CError.__index = CError +function CError.__tostring(self) return "error:" .. tostring(self.value) end +function T.error(val) + local self = setmetatable({value=val}, CError) + T.istype[self] = true + T.iserror[self] = true + return self +end + + +-- Gets a type that is a superset of the two given types. +function T.superset_types(a, b) + if T.iserror[a] then return a end + if T.iserror[b] then return b end + if rawequal(a, b) then -- note: including nil == nil + return a + elseif type(a) == 'string' or a == T.string then + if type(b) == 'string' or b == T.string then + return T.string + else + return T.universal + end + elseif type(a) == 'number' or a == T.number then + if type(b) == 'number' or b == T.number then + return T.number + else + return T.universal + end + elseif type(a) == 'boolean' or a == T.boolean then + if type(b) == 'boolean' or b == T.boolean then + return T.boolean + else + return T.universal + end + else + return T.universal -- IMPROVE + end +end +--[[TESTS: +assert(T.superset_types(2, 2) == 2) +assert(T.superset_types(2, 3) == T.number) +assert(T.superset_types(2, T.number) == T.number) +assert(T.superset_types(T.number, T.string) == T.universal) +print 'DONE' +--]] + +-- Determines whether type `o` certainly evaluates to true (true), +-- certainly evaluates to false (false) or could evaluate to either +-- true of false ('?'). +function T.boolean_cast(o) + if T.iserror[o] then -- special case + return '?' + elseif o == nil or o == false or o == T['nil'] then -- all subsets of {nil, false} + return false + elseif o == T.universal or o == T.boolean then -- all supersets of boolean + return '?' 
+ else -- all subsets of universal - {nil, false} + return true + end +end + +return T diff --git a/builders/lua-inspect/luainspect b/builders/lua-inspect/luainspect new file mode 100755 index 000000000..9d2fd7ada --- /dev/null +++ b/builders/lua-inspect/luainspect @@ -0,0 +1,17 @@ +#!/usr/bin/env lua + +-- Set Lua library paths. +-- based on findbin -- https://gist.github.com/1342365 +-- and lib -- https://gist.github.com/1342319 +local function findbin() + local script = arg and arg[0] or '' + local bin = script:gsub('[/\\]?[^/\\]+$', '') -- remove file name + if bin == '' then bin = '.' end + return bin +end +local bin = findbin() +package.path = package.path..';'..bin..'/metalualib/?.lua' +package.path = package.path..';'..bin..'/lib/?.lua' + +require 'luainspect.command' + diff --git a/builders/lua-inspect/metalualib/LICENSE b/builders/lua-inspect/metalualib/LICENSE new file mode 100644 index 000000000..58b0c6724 --- /dev/null +++ b/builders/lua-inspect/metalualib/LICENSE @@ -0,0 +1,35 @@ +Metalua + +Copyright (c) 2006-2997 Fabien Fleutot + +Metalua is available under the MIT licence. + +Significant parts of the compiler borrow code from other projects, +all released under the MIT license: +- Lua +- Kein-Hong Man's Yueliang +- Tomás Guisasola's Lua Rings +- Ben Sunshine-Hill's Pluto +- Thomas Reuben's Bitlib + +MIT License +=========== + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/builders/lua-inspect/metalualib/README.TXT b/builders/lua-inspect/metalualib/README.TXT new file mode 100644 index 000000000..b9e5cc1b8 --- /dev/null +++ b/builders/lua-inspect/metalualib/README.TXT @@ -0,0 +1,397 @@ +README.TXT +========== +For installation matters, cf. INSTALL.TXT + +Metalua 0.5 +=========== + +Metalua is a static metaprogramming system for Lua: a set of tools +that let you alter the compilation process in arbitrary, powerful and +maintainable ways. For the potential first-time users of such a +system, a descripition of these tools, as implemented by Metalua, +follows. + +Dynamic Parsers +--------------- + +One of the tools is the dynamic parser, which allows a source file to +change the grammar recognized by the parser, while it's being +parsed. Taken alone, this feature lets you make superficial syntax +tweaks on the language. The parser is based on a parser combinator +library called 'gg'; you should know the half dozen functions in gg +API to do advanced things: + +- There are a couple of very simple combinators like gg.list, + gg.sequence, qq.multisequence, gg.optkeyword etc. that offer a level + of expressiveness comparable to Yacc-like parsers. For instance, if + mlp.expr parses Lua expressions, gg.list{ mlp.expr } creates a + parser which handles lists of Lua expressions. + +- Since you can create all the combinators you can think of (they're + regular, higher-order functions), there also are combinators + specialized for typical language tasks. 
In Yacc-like systems, the + language definition quickly becomes unreadable, because all + non-native features have to be encoded in clumsy and brittle ways. + So if your parser won't natively let you specify infix operator + precedence and associativity easily, tough luck for you and your + code maintainers. With combinators OTOH, most of such useful + functions already exist, and you can write your owns without + rewriting the parser itself. For instance, adding an infix operator + would just look like: + + > mlp.expr.infix:add{ "xor", prec=40, assoc='left', builder=xor_builder } + + Moreover, combinators tend to produce usable error messages when fed + with syntactically incorrect inputs. It matters, because clearly + explaining why an invalid input is invalid is almost as important as + compiling a valid one, for a use=able compiler. + +Yacc-like systems might seem simpler to adopt than combinators, as +long as they're used on extremely simple problems. However, if you +either try to write something non trivial, or to write a simple macro +in a robust way, you'll need to use lots of messy tricks and hacks, +and spend much more time getting them (approximately) right than +that 1/2 hour required to master the regular features of gg. + + +Real meta-programming +--------------------- + +If you plan to go beyond trivial keyword-for-keyword syntax tweaks, +what will limit you is not syntax definition, but the ability to +manipulate source code conveniently: without the proper tools and +abstractions, even the simplest tasks will turn into a dirty hacks +fest, then either into a maintenance nightmare, or simply into +abandonware. Providing an empowering framework so that you don't get +stuck in such predicaments is Metalua's whole purpose. The central +concept is that programs prefer to manipulate code as trees, whereas +most developers prefer ASCII sources, so both representations must be +freely interchangeable. 
The make-or-break deal is then: + +- To easily let users see sources as trees, as sources, or as + combination thereof, and switch representations seamlessly. + +- To offer the proper libraries, that won't force you to reinvent a + square wheel, will take care of the most common pitfalls, won't + force you to resort to brittle hacks. + +On the former point, Lisps are at a huge advantage, their user syntax +already being trees. But languages with casual syntax can also offer +interchangeable tree/source views; Metalua has some quoting +{ ... } +and anti-quoting -{ ... } operators which let you switch between both +representations at will: internally it works on trees, but you always +have the option to see them as quoted sources. Metalua also supports a +slightly improved syntax for syntax trees, to improve their +readability. + +Library-wise, Metalua offers a set of syntax tree manipulation tools: + +- Structural pattern matching, a feature traditionally found in + compiler-writing specialized languages (and which has nothing to do + with string regular expressions BTW), which lets you express + advanced tree analysis operations in a compact, readable and + efficient way. If you have to work with advanced data structures + and you try it, you'll never go back. + +- The walker library allows you to perform transformations on big + portions of programs. It lets you easily express things like: + "replace all return statements which aren't in a nested function by + error statements", "rename all local variables and their instances + into unique fresh names", "list the variables which escape this + chunk's scope", "insert a type-checking instruction into every + assignments to variable X", etc. Most of non-trivial macros will + require some of those global code transformations, if you really want + them to behave correctly. 
+
+- Macro hygiene, although not perfect yet in Metalua, is required if
+ you want to make macro writing reasonably usable (and contrary to a
+ popular belief, renaming local variables into fresh names only
+ addresses the easiest part of the hygiene issue; cf. changelog below
+ for more details).
+
+- The existing extensions are progressively refactored in more modular
+ ways, so that their features can be effectively reused in other
+ extensions.
+
+
+Noteworthy changes from 0.4.1 to 0.5
+====================================
+
+Simplification of the install and structure:
+
+- This release is included in Lua for Windows, so it now couldn't get simpler
+ for MS-Windows users!
+
+- Metalua is written in pure Lua again, thus making it platform-independent.
+ No more mandatory C libraries. Pluto interface might be back, as an option,
+ in a future version, but it's not worth the install trouble involved by
+ DLL dependencies.
+
+- Simpler build process, just run make.sh or make.bat depending on your OS.
+
+- Metalua libraries are now in a separate metalua/* package. This allows to
+ mix them with other Lua libraries, and to use them from plain Lua programs
+ if you FIXME
+
+
+Other changes:
+
+- new option -S in metalua: prints sources re-generated from AST, after macro
+ expansion.
+
+- compatible with more Lua VMs: 64 bits numbers, integral numbers, big endians...
+
+- some new extensions: xloop, xmatch, improved match.
+
+- ASTs now keep track of the source extract that generated them (API is not
+ mature though, it will be changed and broken).
+
+- improved table printer: support of a plain-Lua mode, alternative indentation
+ mode for deeply-nested tables.
+
+- added a generic table serializer, which handles shared and recursive
+ sub-tables correctly.
+
+- gg API has been made slightly more flexible, as a first step towards a
+ comprehensive syntax support for gg grammar definition. Follow the gg-syntax
+ branch on github for ongoing work. 
+
+
+Noteworthy changes from 0.4 to 0.4.1
+====================================
+
+- Proper reporting of runtime errors
+- Interactive REPL loop
+- Support for 64 bits architectures
+- Update to Pluto 2.2 and Lua 5.1.3
+- Build for Visual Studio .NET
+
+Noteworthy changes from 0.3 to 0.4
+==================================
+
+- A significantly bigger code base, mostly due to more libraries:
+ about 2.5KLoC for libs, 4KLoC for the compiler. However, this remains
+ tiny in today's desktop computers standards. You don't have to know
+ all of the system to do useful stuff with it, and since compiled
+ files are Lua 5.1 compatible, you can keep the "big" system on a
+ development platform, and keep a lightweight runtime for embedded or
+ otherwise underpowered targets.
+
+
+- The compiler/interpreter front-end is completely rewritten. The new
+ frontend program, aptly named 'Metalua', supports proper passing of
+ arguments to programs, and is generally speaking much more user
+ friendly than the mlc from the previous version.
+
+
+- Metalua source libraries are looked for in environment variable
+ LUA_MPATH, distinct from LUA_PATH. This way, in an application
+ that's part Lua part Metalua, you keep a natural access to the
+ native Lua compiler.
+
+ By convention, Metalua source files should have extension .mlua. By
+ default, bytecode and plain lua files have higher precedence than
+ Metalua sources, which lets you easily precompile your libraries.
+
+
+- Compilation of files are separated in different Lua Rings: this
+ prevents unwanted side-effects when several files are compiled
+ (This can be turned off, but shouldn't be IMO).
+
+
+- Metalua features are accessible programmatically. Library
+ 'Metalua.runtime' loads only the libraries necessary to run an
+ already compiled file; 'Metalua.compile' loads everything useful at
+ compile-time. 
+ + Transformation functions are available in a library 'mlc' that + contains all meaningful transformation functions in the form + 'mlc.destformat_of_sourceformat()', such as 'mlc.luacfile_of_ast()', + 'mlc.function_of_luastring()' etc. This library has been + significantly completed and rewritten (in Metalua) since v0.3. + + +- Helper libraries have been added. For now they're in the + distribution, at some point they should be luarocked in. These + include: + - Lua Rings and Pluto, duct-taped together into Springs, an improved + Rings that lets states exchange arbitrary data instead of just + scalars and strings. Since Pluto requires a (minor) patch to the + VM, it can be disabled. + - Lua bits for bytecode dumping. + - As always, very large amounts of code borrowed from Yueliang. + - As a commodity, I've also packaged Lua sources in. + + +- Extensions to Lua standard libraries: many more features in table + and the baselib, a couple of string features, and a package system + which correctly handles Metalua source files. + + +- Builds on Linux, OSX, Microsoft Visual Studio. Might build on mingw + (not tested recently, patches welcome). It's easily ported to all + systems with a full support for lua, and if possible dynamic + libraries. + + The MS-windows building is based on a dirty .bat script, because + that's pretty much the only thing you're sure to find on a win32 + computer. It uses Microsoft Visual Studio as a compiler (tested with + VC++ 6). + + Notice that parts of the compiler itself are now written in Metalua, + which means that its building now goes through a bootstrapping + stage. + + +- Structural pattern matching improvements: + - now also handles string regular expressions: 'someregexp'/pattern + will match if the tested term is a string accepted by the regexp, + and on success, the list of captures done by the regexp is matched + against pattern. 
+ - Matching of multiple values has been optimized + - the default behavior when no case match is no to raise an error, + it's the most commonly expected case in practice. Trivial to + cancel with a final catch-all pattern. + - generated calls to type() are now hygienic (it's been the cause of + a puzzling bug report; again, hygiene is hard). + + +- AST grammar overhaul: + The whole point of being alpha is to fix APIs with a more relaxed + attitude towards backward compatibility. I think and hope it's the + last AST revision, so here is it: + - `Let{...} is now called `Set{...} + (Functional programmers would expect 'Let' to introduce an + immutable binding, and assignment isn't immutable in Lua) + - `Key{ key, value } in table literals is now written `Pair{ key, value } + (it contained a key *and* its associated value; besides, 'Pair' is + consistent with the name of the for-loop iterator) + - `Method{...} is now `Invoke{...} + (because it's a method invocation, not a method declaration) + - `One{...} is now `Paren{...} and is properly documented + (it's the node representing parentheses: it's necessary, since + parentheses are sometimes meaningful in Lua) + - Operator are simplified: `Op{ 'add', +{2}, +{2} } instead of + `Op{ `Add, +{2}, +{2} }. Operator names match the corresponding + metatable entries, without the leading double-underscore. + - The operators which haven't a metatable counterpart are + deprecated: 'ne', 'ge', 'gt'. + + +- Overhaul of the code walking library: + - the API has been simplified: the fancy predicates proved more + cumbersome to use than a bit of pattern matching in the visitors. + - binding identifiers are handled as a distinct AST class + - walk.id is scope-aware, handles free and bound variables in a + sensible way. + - the currified API proved useless and sometimes cumbersome, it's + been removed. 
+ + +- Hygiene: I originally planned to release a full-featured hygienic + macro system with v0.4, but what exists remains a work in + progress. Lua is a Lisp-1, which means unhygienic macros are very + dangerous, and hygiene a la Scheme pretty much limits macro writing + to a term rewriting subset of the language, which would be crippling + to use. + + Note: inside hygiene, i.e. preventing macro code from capturing + variables in user code, is trivial to address through alpha + conversion, it's not the issue. The trickier part is outside + hygiene, when user's binders capture globals required by the + macro-generated code. That's the cause of pretty puzzling and hard + to find bugs. And the *really* tricky part, which is still an open + problem in Metalua, is when you have several levels of nesting + between user code and macro code. For now this case has to be + hygienized by hand. + + Note 2: Converge has a pretty powerful approach to hygienic macros + in a Lisp-1 language; for reasons that would be too long to expose + here, I don't think its approach would be the best suited to Metalua. + But I might well be proved wrong eventually. + + Note 3: Redittors must have read that Paul Graham has released Arc, + which is also a Lisp-1 with Common Lisp style macros; I expect this + to create a bit of buzz, out of which might emerge proper solutions + the macro hygiene problem. + + +- No more need to create custom syntax for macros when you don't want + to. Extension 'dollar' will let you declare macros in the dollar + table, as in +{block: function dollar.MYMACRO(a, b, c) ... end}, + and use it as $MYMACRO(1, 2, 3) in your code. + + With this extension, you can write macros without knowing anything + about the Metalua parser. Together with quasi-quotes and automatic + hygiene, this will probably be the closest we can go to "macros for + dummies" without creating an unmaintainable mess generator. 
+ + Besides, it's consistent with my official position that focusing on + superficial syntax issues is counter-productive most of the time :) + + +- Lexers can be switched on the fly. This lets you change the set of + keywords temporarily, with the new gg.with_lexer() combinator. You + can also handle radically different syntaxes in a single file (think + multiple-languages systems such as LuaTeX, or programs+goo as PHP). + + +- Incorporation of the bug fixes reported to the mailing list and on + the blog. + + +- New samples and extensions, in various states of completion: + + * lists by comprehension, a la python/haskell. It includes lists + chunking, e.g. mylist[1 ... 3, 5 ... 7] + + * anaphoric macros for 'if' and 'while' statements: with this + extension, the condition of the 'if'/'while' is bound to variable + 'it' in the body; it lets you write things like: + + > while file:read '*l' do print(it) end. + + No runtime overhead when 'it' isn't used in the body. An anaphoric + variable should also be made accessible for functions, to let + easily write anonymous recursive functions. + + * Try ... catch ... finally extension. Syntax is less than ideal, + but the proper way to fix that is to refactor the match extension + to improve code reuse. There would be many other great ways to + leverage a refactored match extension, e.g. destructuring binds or + multiple dispatch methods. To be done in the next version. + + * with ... do extension: it uses try/finally to make sure that + resources will be properly closed. The only constraint on + resources is that they have to support a :close() releasing method. + For instance, he following code guarantees that file1 and file2 + will be closed, even if a return or an error occurs in the body. + + > with file1, file2 = io.open "f1.txt", io.open "f2.txt" do + > contents = file1:read'*a' .. 
file2:read ;*a' + > end + + * continue statement, logging facilities, ternary "?:" choice + operator, assignments as expressions, and a couple of similarly + tiny syntax sugar extensions. + + +You might expect in next versions +================================= +The next versions of Metalua will provide some of the following +improvements, in no particular order: better error reporting, +especially at runtime (there's a patch I've been too lazy to test +yet), support for 64 bits CPUs, better support for macro hygiene, more +samples and extensions, an adequate test suite, refactored libraries. + + +Credits +======= + +I'd like to thank the people who wrote the open source code which +makes Metalua run: the Lua team, the authors of Yueliang, Pluto, Lua +Rings, Bitlib; and the people whose bug reports, patches and +insightful discussions dramatically improved the global design, +including John Belmonte, Vyacheslav Egorov, David Manura, Olivier +Gournet, Eric Raible, Laurence Tratt, Alexander Gladysh, Ryan +Pusztai... diff --git a/builders/lua-inspect/metalualib/gg.lua b/builders/lua-inspect/metalualib/gg.lua new file mode 100644 index 000000000..26d93d955 --- /dev/null +++ b/builders/lua-inspect/metalualib/gg.lua @@ -0,0 +1,748 @@ +---------------------------------------------------------------------- +-- Metalua. +-- +-- Summary: parser generator. Collection of higher order functors, +-- which allow to build and combine parsers. Relies on a lexer +-- that supports the same API as the one exposed in mll.lua. +-- +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006-2008, Fabien Fleutot . +-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. 
+-- +---------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Exported API: +-- +-- Parser generators: +-- * [gg.sequence()] +-- * [gg.multisequence()] +-- * [gg.expr()] +-- * [gg.list()] +-- * [gg.onkeyword()] +-- * [gg.optkeyword()] +-- +-- Other functions: +-- * [gg.parse_error()] +-- * [gg.make_parser()] +-- * [gg.is_parser()] +-- +-------------------------------------------------------------------------------- + +module("gg", package.seeall) + +------------------------------------------------------------------------------- +-- parser metatable, which maps __call to method parse, and adds some +-- error tracing boilerplate. +------------------------------------------------------------------------------- +local parser_metatable = { } +function parser_metatable.__call (parser, lx, ...) + --printf ("Call parser %q of type %q", parser.name or "?", parser.kind) + if mlc.metabugs then + return parser:parse (lx, ...) + --local x = parser:parse (lx, ...) + --printf ("Result of parser %q: %s", + -- parser.name or "?", + -- _G.table.tostring(x, "nohash", 80)) + --return x + else + local li = lx:lineinfo_right() or { "?", "?", "?", "?" } + local status, ast = pcall (parser.parse, parser, lx, ...) + if status then return ast else + error (string.format ("%s\n - (l.%s, c.%s, k.%s) in parser %s", + ast:strmatch "gg.lua:%d+: (.*)" or ast, + li[1], li[2], li[3], parser.name or parser.kind)) + end + end +end + +------------------------------------------------------------------------------- +-- Turn a table into a parser, mainly by setting the metatable. 
+------------------------------------------------------------------------------- +function make_parser(kind, p) + p.kind = kind + if not p.transformers then p.transformers = { } end + function p.transformers:add (x) + table.insert (self, x) + end + setmetatable (p, parser_metatable) + return p +end + +------------------------------------------------------------------------------- +-- Return true iff [x] is a parser. +-- If it's a gg-generated parser, return the name of its kind. +------------------------------------------------------------------------------- +function is_parser (x) + return type(x)=="function" or getmetatable(x)==parser_metatable and x.kind +end + +------------------------------------------------------------------------------- +-- Parse a sequence, without applying builder nor transformers +------------------------------------------------------------------------------- +local function raw_parse_sequence (lx, p) + local r = { } + for i=1, #p do + e=p[i] + if type(e) == "string" then + if not lx:is_keyword (lx:next(), e) then + parse_error (lx, "Keyword '%s' expected", e) end + elseif is_parser (e) then + table.insert (r, e (lx)) + else + gg.parse_error (lx,"Sequence `%s': element #%i is not a string ".. + "nor a parser: %s", + p.name, i, table.tostring(e)) + end + end + return r +end + +------------------------------------------------------------------------------- +-- Parse a multisequence, without applying multisequence transformers. +-- The sequences are completely parsed. +------------------------------------------------------------------------------- +local function raw_parse_multisequence (lx, sequence_table, default) + local seq_parser = sequence_table[lx:is_keyword(lx:peek())] + if seq_parser then return seq_parser (lx) + elseif default then return default (lx) + else return false end +end + +------------------------------------------------------------------------------- +-- Applies all transformers listed in parser on ast. 
+------------------------------------------------------------------------------- +local function transform (ast, parser, fli, lli) + if parser.transformers then + for _, t in ipairs (parser.transformers) do ast = t(ast) or ast end + end + if type(ast) == 'table'then + local ali = ast.lineinfo + if not ali or ali.first~=fli or ali.last~=lli then + ast.lineinfo = { first = fli, last = lli } + end + end + return ast +end + +------------------------------------------------------------------------------- +-- Generate a tracable parsing error (not implemented yet) +------------------------------------------------------------------------------- +function parse_error(lx, fmt, ...) + local li = lx:lineinfo_left() or {-1,-1,-1, ""} + local msg = string.format("line %i, char %i: "..fmt, li[1], li[2], ...) + local src = lx.src + if li[3]>0 and src then + local i, j = li[3], li[3] + while src:sub(i,i) ~= '\n' and i>=0 do i=i-1 end + while src:sub(j,j) ~= '\n' and j<=#src do j=j+1 end + local srcline = src:sub (i+1, j-1) + local idx = string.rep (" ", li[2]).."^" + msg = string.format("%s\n>>> %s\n>>> %s", msg, srcline, idx) + end + error(msg) +end + +------------------------------------------------------------------------------- +-- +-- Sequence parser generator +-- +------------------------------------------------------------------------------- +-- Input fields: +-- +-- * [builder]: how to build an AST out of sequence parts. let [x] be the list +-- of subparser results (keywords are simply omitted). [builder] can be: +-- - [nil], in which case the result of parsing is simply [x] +-- - a string, which is then put as a tag on [x] +-- - a function, which takes [x] as a parameter and returns an AST. +-- +-- * [name]: the name of the parser. Used for debug messages +-- +-- * [transformers]: a list of AST->AST functions, applied in order on ASTs +-- returned by the parser. 
+-- +-- * Table-part entries corresponds to keywords (strings) and subparsers +-- (function and callable objects). +-- +-- After creation, the following fields are added: +-- * [parse] the parsing function lexer->AST +-- * [kind] == "sequence" +-- * [name] is set, if it wasn't in the input. +-- +------------------------------------------------------------------------------- +function sequence (p) + make_parser ("sequence", p) + + ------------------------------------------------------------------- + -- Parsing method + ------------------------------------------------------------------- + function p:parse (lx) + -- Raw parsing: + local fli = lx:lineinfo_right() + local seq = raw_parse_sequence (lx, self) + local lli = lx:lineinfo_left() + + -- Builder application: + local builder, tb = self.builder, type (self.builder) + if tb == "string" then seq.tag = builder + elseif tb == "function" or builder and builder.__call then seq = builder(seq) + elseif builder == nil then -- nothing + else error ("Invalid builder of type "..tb.." in sequence") end + seq = transform (seq, self, fli, lli) + assert (not seq or seq.lineinfo) + return seq + end + + ------------------------------------------------------------------- + -- Construction + ------------------------------------------------------------------- + -- Try to build a proper name + if not p.name and type(p[1])=="string" then + p.name = p[1].." ..." + if type(p[#p])=="string" then p.name = p.name .. " " .. p[#p] end + else + p.name = "" + end + + return p +end -- + + +------------------------------------------------------------------------------- +-- +-- Multiple, keyword-driven, sequence parser generator +-- +------------------------------------------------------------------------------- +-- in [p], useful fields are: +-- +-- * [transformers]: as usual +-- +-- * [name]: as usual +-- +-- * Table-part entries must be sequence parsers, or tables which can +-- be turned into a sequence parser by [gg.sequence]. 
These +-- sequences must start with a keyword, and this initial keyword +-- must be different for each sequence. The table-part entries will +-- be removed after [gg.multisequence] returns. +-- +-- * [default]: the parser to run if the next keyword in the lexer is +-- none of the registered initial keywords. If there's no default +-- parser and no suitable initial keyword, the multisequence parser +-- simply returns [false]. +-- +-- After creation, the following fields are added: +-- +-- * [parse] the parsing function lexer->AST +-- +-- * [sequences] the table of sequences, indexed by initial keywords. +-- +-- * [add] method takes a sequence parser or a config table for +-- [gg.sequence], and adds/replaces the corresponding sequence +-- parser. If the keyword was already used, the former sequence is +-- removed and a warning is issued. +-- +-- * [get] method returns a sequence by its initial keyword +-- +-- * [kind] == "multisequence" +-- +------------------------------------------------------------------------------- +function multisequence (p) + make_parser ("multisequence", p) + + ------------------------------------------------------------------- + -- Add a sequence (might be just a config table for [gg.sequence]) + ------------------------------------------------------------------- + function p:add (s) + -- compile if necessary: + local keyword = s[1] + if not is_parser(s) then sequence(s) end + if is_parser(s) ~= 'sequence' or type(keyword) ~= "string" then + if self.default then -- two defaults + error ("In a multisequence parser, all but one sequences ".. 
+ "must start with a keyword") + else self.default = s end -- first default + elseif self.sequences[keyword] then -- duplicate keyword + eprintf (" *** Warning: keyword %q overloaded in multisequence ***", keyword) + self.sequences[keyword] = s + else -- newly caught keyword + self.sequences[keyword] = s + end + end -- + + ------------------------------------------------------------------- + -- Get the sequence starting with this keyword. [kw :: string] + ------------------------------------------------------------------- + function p:get (kw) return self.sequences [kw] end + + ------------------------------------------------------------------- + -- Remove the sequence starting with keyword [kw :: string] + ------------------------------------------------------------------- + function p:del (kw) + if not self.sequences[kw] then + eprintf("*** Warning: trying to delete sequence starting ".. + "with %q from a multisequence having no such ".. + "entry ***", kw) end + local removed = self.sequences[kw] + self.sequences[kw] = nil + return removed + end + + ------------------------------------------------------------------- + -- Parsing method + ------------------------------------------------------------------- + function p:parse (lx) + local fli = lx:lineinfo_right() + local x = raw_parse_multisequence (lx, self.sequences, self.default) + local lli = lx:lineinfo_left() + return transform (x, self, fli, lli) + end + + ------------------------------------------------------------------- + -- Construction + ------------------------------------------------------------------- + -- Register the sequences passed to the constructor. They're going + -- from the array part of the parser to the hash part of field + -- [sequences] + p.sequences = { } + for i=1, #p do p:add (p[i]); p[i] = nil end + + -- FIXME: why is this commented out? 
+ --if p.default and not is_parser(p.default) then sequence(p.default) end + return p +end -- + + +------------------------------------------------------------------------------- +-- +-- Expression parser generator +-- +------------------------------------------------------------------------------- +-- +-- Expression configuration relies on three tables: [prefix], [infix] +-- and [suffix]. Moreover, the primary parser can be replaced by a +-- table: in this case the [primary] table will be passed to +-- [gg.multisequence] to create a parser. +-- +-- Each of these tables is a modified multisequence parser: the +-- differences with respect to regular multisequence config tables are: +-- +-- * the builder takes specific parameters: +-- - for [prefix], it takes the result of the prefix sequence parser, +-- and the prefixed expression +-- - for [infix], it takes the left-hand-side expression, the results +-- of the infix sequence parser, and the right-hand-side expression. +-- - for [suffix], it takes the suffixed expression, and theresult +-- of the suffix sequence parser. +-- +-- * the default field is a list, with parameters: +-- - [parser] the raw parsing function +-- - [transformers], as usual +-- - [prec], the operator's precedence +-- - [assoc] for [infix] table, the operator's associativity, which +-- can be "left", "right" or "flat" (default to left) +-- +-- In [p], useful fields are: +-- * [transformers]: as usual +-- * [name]: as usual +-- * [primary]: the atomic expression parser, or a multisequence config +-- table (mandatory) +-- * [prefix]: prefix operators config table, see above. +-- * [infix]: infix operators config table, see above. +-- * [suffix]: suffix operators config table, see above. 
+-- +-- After creation, these fields are added: +-- * [kind] == "expr" +-- * [parse] as usual +-- * each table is turned into a multisequence, and therefore has an +-- [add] method +-- +------------------------------------------------------------------------------- +function expr (p) + make_parser ("expr", p) + + ------------------------------------------------------------------- + -- parser method. + -- In addition to the lexer, it takes an optional precedence: + -- it won't read expressions whose precedence is lower or equal + -- to [prec]. + ------------------------------------------------------------------- + function p:parse (lx, prec) + prec = prec or 0 + + ------------------------------------------------------ + -- Extract the right parser and the corresponding + -- options table, for (pre|in|suff)fix operators. + -- Options include prec, assoc, transformers. + ------------------------------------------------------ + local function get_parser_info (tab) + local p2 = tab:get (lx:is_keyword (lx:peek())) + if p2 then -- keyword-based sequence found + local function parser(lx) return raw_parse_sequence(lx, p2) end + return parser, p2 + else -- Got to use the default parser + local d = tab.default + if d then return d.parse or d.parser, d + else return false, false end + end + end + + ------------------------------------------------------ + -- Look for a prefix sequence. Multiple prefixes are + -- handled through the recursive [p.parse] call. + -- Notice the double-transform: one for the primary + -- expr, and one for the one with the prefix op. 
+ ------------------------------------------------------ + local function handle_prefix () + local fli = lx:lineinfo_right() + local p2_func, p2 = get_parser_info (self.prefix) + local op = p2_func and p2_func (lx) + if op then -- Keyword-based sequence found + local ili = lx:lineinfo_right() -- Intermediate LineInfo + local e = p2.builder (op, self:parse (lx, p2.prec)) + local lli = lx:lineinfo_left() + return transform (transform (e, p2, ili, lli), self, fli, lli) + else -- No prefix found, get a primary expression + local e = self.primary(lx) + local lli = lx:lineinfo_left() + return transform (e, self, fli, lli) + end + end -- + + ------------------------------------------------------ + -- Look for an infix sequence+right-hand-side operand. + -- Return the whole binary expression result, + -- or false if no operator was found. + ------------------------------------------------------ + local function handle_infix (e) + local p2_func, p2 = get_parser_info (self.infix) + if not p2 then return false end + + ----------------------------------------- + -- Handle flattening operators: gather all operands + -- of the series in [list]; when a different operator + -- is found, stop, build from [list], [transform] and + -- return. + ----------------------------------------- + if (not p2.prec or p2.prec>prec) and p2.assoc=="flat" then + local fli = lx:lineinfo_right() + local pflat, list = p2, { e } + repeat + local op = p2_func(lx) + if not op then break end + table.insert (list, self:parse (lx, p2.prec)) + local _ -- We only care about checking that p2==pflat + _, p2 = get_parser_info (self.infix) + until p2 ~= pflat + local e2 = pflat.builder (list) + local lli = lx:lineinfo_left() + return transform (transform (e2, pflat, fli, lli), self, fli, lli) + + ----------------------------------------- + -- Handle regular infix operators: [e] the LHS is known, + -- just gather the operator and [e2] the RHS. + -- Result goes in [e3]. 
+ ----------------------------------------- + elseif p2.prec and p2.prec>prec or + p2.prec==prec and p2.assoc=="right" then + local fli = e.lineinfo.first -- lx:lineinfo_right() + local op = p2_func(lx) + if not op then return false end + local e2 = self:parse (lx, p2.prec) + local e3 = p2.builder (e, op, e2) + local lli = lx:lineinfo_left() + return transform (transform (e3, p2, fli, lli), self, fli, lli) + + ----------------------------------------- + -- Check for non-associative operators, and complain if applicable. + ----------------------------------------- + elseif p2.assoc=="none" and p2.prec==prec then + parse_error (lx, "non-associative operator!") + --PATCHED:LuaInspect: parser_error -> parse_error + + ----------------------------------------- + -- No infix operator suitable at that precedence + ----------------------------------------- + else return false end + + end -- + + ------------------------------------------------------ + -- Look for a suffix sequence. + -- Return the result of suffix operator on [e], + -- or false if no operator was found. + ------------------------------------------------------ + local function handle_suffix (e) + -- FIXME bad fli, must take e.lineinfo.first + local p2_func, p2 = get_parser_info (self.suffix) + if not p2 then return false end + if not p2.prec or p2.prec>=prec then + --local fli = lx:lineinfo_right() + local fli = e.lineinfo.first + local op = p2_func(lx) + if not op then return false end + local lli = lx:lineinfo_left() + e = p2.builder (e, op) + e = transform (transform (e, p2, fli, lli), self, fli, lli) + return e + end + return false + end -- + + ------------------------------------------------------ + -- Parser body: read suffix and (infix+operand) + -- extensions as long as we're able to fetch more at + -- this precedence level. 
+ ------------------------------------------------------ + local e = handle_prefix() + repeat + local x = handle_suffix (e); e = x or e + local y = handle_infix (e); e = y or e + until not (x or y) + + -- No transform: it already happened in operators handling + return e + end -- + + ------------------------------------------------------------------- + -- Construction + ------------------------------------------------------------------- + if not p.primary then p.primary=p[1]; p[1]=nil end + for _, t in ipairs{ "primary", "prefix", "infix", "suffix" } do + if not p[t] then p[t] = { } end + if not is_parser(p[t]) then multisequence(p[t]) end + end + function p:add(...) return self.primary:add(...) end + return p +end -- + + +------------------------------------------------------------------------------- +-- +-- List parser generator +-- +------------------------------------------------------------------------------- +-- In [p], the following fields can be provided in input: +-- +-- * [builder]: takes list of subparser results, returns AST +-- * [transformers]: as usual +-- * [name]: as usual +-- +-- * [terminators]: list of strings representing the keywords which +-- might mark the end of the list. When non-empty, the list is +-- allowed to be empty. A string is treated as a single-element +-- table, whose element is that string, e.g. ["do"] is the same as +-- [{"do"}]. +-- +-- * [separators]: list of strings representing the keywords which can +-- separate elements of the list. When non-empty, one of these +-- keyword has to be found between each element. Lack of a separator +-- indicates the end of the list. A string is treated as a +-- single-element table, whose element is that string, e.g. ["do"] +-- is the same as [{"do"}]. If [terminators] is empty/nil, then +-- [separators] has to be non-empty. 
+-- +-- After creation, the following fields are added: +-- * [parse] the parsing function lexer->AST +-- * [kind] == "list" +-- +------------------------------------------------------------------------------- +function list (p) + make_parser ("list", p) + + ------------------------------------------------------------------- + -- Parsing method + ------------------------------------------------------------------- + function p:parse (lx) + + ------------------------------------------------------ + -- Used to quickly check whether there's a terminator + -- or a separator immediately ahead + ------------------------------------------------------ + local function peek_is_in (keywords) + return keywords and lx:is_keyword(lx:peek(), unpack(keywords)) end + + local x = { } + local fli = lx:lineinfo_right() + + -- if there's a terminator to start with, don't bother trying + if not peek_is_in (self.terminators) then + repeat table.insert (x, self.primary (lx)) -- read one element + until + -- First reason to stop: There's a separator list specified, + -- and next token isn't one. Otherwise, consume it with [lx:next()] + self.separators and not(peek_is_in (self.separators) and lx:next()) or + -- Other reason to stop: terminator token ahead + peek_is_in (self.terminators) or + -- Last reason: end of file reached + lx:peek().tag=="Eof" + end + + local lli = lx:lineinfo_left() + + -- Apply the builder. It can be a string, or a callable value, + -- or simply nothing. 
+ local b = self.builder + if b then + if type(b)=="string" then x.tag = b -- b is a string, use it as a tag + elseif type(b)=="function" then x=b(x) + else + local bmt = getmetatable(b) + if bmt and bmt.__call then x=b(x) end + end + end + return transform (x, self, fli, lli) + end -- + + ------------------------------------------------------------------- + -- Construction + ------------------------------------------------------------------- + if not p.primary then p.primary = p[1]; p[1] = nil end + if type(p.terminators) == "string" then p.terminators = { p.terminators } + elseif p.terminators and #p.terminators == 0 then p.terminators = nil end + if type(p.separators) == "string" then p.separators = { p.separators } + elseif p.separators and #p.separators == 0 then p.separators = nil end + + return p +end -- + + +------------------------------------------------------------------------------- +-- +-- Keyword-conditionned parser generator +-- +------------------------------------------------------------------------------- +-- +-- Only apply a parser if a given keyword is found. The result of +-- [gg.onkeyword] parser is the result of the subparser (modulo +-- [transformers] applications). +-- +-- lineinfo: the keyword is *not* included in the boundaries of the +-- resulting lineinfo. A review of all usages of gg.onkeyword() in the +-- implementation of metalua has shown that it was the appropriate choice +-- in every case. +-- +-- Input fields: +-- +-- * [name]: as usual +-- +-- * [transformers]: as usual +-- +-- * [peek]: if non-nil, the conditionning keyword is left in the lexeme +-- stream instead of being consumed. +-- +-- * [primary]: the subparser. +-- +-- * [keywords]: list of strings representing triggering keywords. +-- +-- * Table-part entries can contain strings, and/or exactly one parser. +-- Strings are put in [keywords], and the parser is put in [primary]. 
+-- +-- After the call, the following fields will be set: +-- +-- * [parse] the parsing method +-- * [kind] == "onkeyword" +-- * [primary] +-- * [keywords] +-- +------------------------------------------------------------------------------- +function onkeyword (p) + make_parser ("onkeyword", p) + + ------------------------------------------------------------------- + -- Parsing method + ------------------------------------------------------------------- + function p:parse(lx) + if lx:is_keyword (lx:peek(), unpack(self.keywords)) then + --local fli = lx:lineinfo_right() + if not self.peek then lx:next() end + local content = self.primary (lx) + --local lli = lx:lineinfo_left() + local fli, lli = content.lineinfo.first, content.lineinfo.last + return transform (content, p, fli, lli) + else return false end + end + + ------------------------------------------------------------------- + -- Construction + ------------------------------------------------------------------- + if not p.keywords then p.keywords = { } end + for _, x in ipairs(p) do + if type(x)=="string" then table.insert (p.keywords, x) + else assert (not p.primary and is_parser (x)); p.primary = x end + end + if not next (p.keywords) then + eprintf("Warning, no keyword to trigger gg.onkeyword") end + assert (p.primary, 'no primary parser in gg.onkeyword') + return p +end -- + + +------------------------------------------------------------------------------- +-- +-- Optional keyword consummer pseudo-parser generator +-- +------------------------------------------------------------------------------- +-- +-- This doesn't return a real parser, just a function. That function parses +-- one of the keywords passed as parameters, and returns it. It returns +-- [false] if no matching keyword is found. +-- +-- Notice that tokens returned by lexer already carry lineinfo, therefore +-- there's no need to add them, as done usually through transform() calls. 
+------------------------------------------------------------------------------- +function optkeyword (...) + local args = {...} + if type (args[1]) == "table" then + assert (#args == 1) + args = args[1] + end + for _, v in ipairs(args) do assert (type(v)=="string") end + return function (lx) + local x = lx:is_keyword (lx:peek(), unpack (args)) + if x then lx:next(); return x + else return false end + end +end + + +------------------------------------------------------------------------------- +-- +-- Run a parser with a special lexer +-- +------------------------------------------------------------------------------- +-- +-- This doesn't return a real parser, just a function. +-- First argument is the lexer class to be used with the parser, +-- 2nd is the parser itself. +-- The resulting parser returns whatever the argument parser does. +-- +------------------------------------------------------------------------------- +function with_lexer(new_lexer, parser) + + ------------------------------------------------------------------- + -- Most gg functions take their parameters in a table, so it's + -- better to silently accept when with_lexer{ } is called with + -- its arguments in a list: + ------------------------------------------------------------------- + if not parser and #new_lexer==2 and type(new_lexer[1])=='table' then + return with_lexer(unpack(new_lexer)) + end + + ------------------------------------------------------------------- + -- Save the current lexer, switch it for the new one, run the parser, + -- restore the previous lexer, even if the parser caused an error. 
+ ------------------------------------------------------------------- + return function (lx) + local old_lexer = getmetatable(lx) + lx:sync() + setmetatable(lx, new_lexer) + local status, result = pcall(parser, lx) + lx:sync() + setmetatable(lx, old_lexer) + if status then return result else error(result) end + end +end diff --git a/builders/lua-inspect/metalualib/lexer.lua b/builders/lua-inspect/metalualib/lexer.lua new file mode 100644 index 000000000..4b2d4ced7 --- /dev/null +++ b/builders/lua-inspect/metalualib/lexer.lua @@ -0,0 +1,513 @@ +---------------------------------------------------------------------- +-- Metalua: $Id: mll.lua,v 1.3 2006/11/15 09:07:50 fab13n Exp $ +-- +-- Summary: generic Lua-style lexer definition. You need this plus +-- some keyword additions to create the complete Lua lexer, +-- as is done in mlp_lexer.lua. +-- +-- TODO: +-- +-- * Make it easy to define new flavors of strings. Replacing the +-- lexer.patterns.long_string regexp by an extensible list, with +-- customizable token tag, would probably be enough. Maybe add: +-- + an index of capture for the regexp, that would specify +-- which capture holds the content of the string-like token +-- + a token tag +-- + or a string->string transformer function. +-- +-- * There are some _G.table to prevent a namespace clash which has +-- now disappered. remove them. +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006, Fabien Fleutot . +-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. +-- +---------------------------------------------------------------------- + +module ("lexer", package.seeall) + +require 'metalua.runtime' + + +lexer = { alpha={ }, sym={ } } +lexer.__index=lexer + +local debugf = function() end +--local debugf=printf + +---------------------------------------------------------------------- +-- Patterns used by [lexer:extract] to decompose the raw string into +-- correctly tagged tokens. 
+---------------------------------------------------------------------- +lexer.patterns = { + spaces = "^[ \r\n\t]*()", + short_comment = "^%-%-([^\n]*)()\n", + final_short_comment = "^%-%-([^\n]*)()$", + long_comment = "^%-%-%[(=*)%[\n?(.-)%]%1%]()", + long_string = "^%[(=*)%[\n?(.-)%]%1%]()", + number_mantissa = { "^%d+%.?%d*()", "^%d*%.%d+()" }, + number_exponant = "^[eE][%+%-]?%d+()", + number_hex = "^0[xX]%x+()", + word = "^([%a_][%w_]*)()" +} + +---------------------------------------------------------------------- +-- unescape a whole string, applying [unesc_digits] and +-- [unesc_letter] as many times as required. +---------------------------------------------------------------------- +local function unescape_string (s) + + -- Turn the digits of an escape sequence into the corresponding + -- character, e.g. [unesc_digits("123") == string.char(123)]. + local function unesc_digits (backslashes, digits) + if #backslashes%2==0 then + -- Even number of backslashes, they escape each other, not the digits. + -- Return them so that unesc_letter() can treaat them + return backslashes..digits + else + -- Remove the odd backslash, which escapes the number sequence. + -- The rest will be returned and parsed by unesc_letter() + backslashes = backslashes :sub (1,-2) + end + local k, j, i = digits:reverse():byte(1, 3) + local z = _G.string.byte "0" + local code = (k or z) + 10*(j or z) + 100*(i or z) - 111*z + if code > 255 then + error ("Illegal escape sequence '\\"..digits.. + "' in string: ASCII codes must be in [0..255]") + end + return backslashes .. string.char (code) + end + + -- Take a letter [x], and returns the character represented by the + -- sequence ['\\'..x], e.g. [unesc_letter "n" == "\n"]. 
+ local function unesc_letter(x) + local t = { + a = "\a", b = "\b", f = "\f", + n = "\n", r = "\r", t = "\t", v = "\v", + ["\\"] = "\\", ["'"] = "'", ['"'] = '"', ["\n"] = "\n" } + return t[x] or error([[Unknown escape sequence '\]]..x..[[']]) + end + + return s + :gsub ("(\\+)([0-9][0-9]?[0-9]?)", unesc_digits) + :gsub ("\\(%D)",unesc_letter) +end + +lexer.extractors = { + "skip_whitespaces_and_comments", + "extract_short_string", "extract_word", "extract_number", + "extract_long_string", "extract_symbol" } + +lexer.token_metatable = { +-- __tostring = function(a) +-- return string.format ("`%s{'%s'}",a.tag, a[1]) +-- end +} + +lexer.lineinfo_metatable = { } + +---------------------------------------------------------------------- +-- Really extract next token fron the raw string +-- (and update the index). +-- loc: offset of the position just after spaces and comments +-- previous_i: offset in src before extraction began +---------------------------------------------------------------------- +function lexer:extract () + local previous_i = self.i + local loc = self.i + local eof, token + + -- Put line info, comments and metatable around the tag and content + -- provided by extractors, thus returning a complete lexer token. + -- first_line: line # at the beginning of token + -- first_column_offset: char # of the last '\n' before beginning of token + -- i: scans from beginning of prefix spaces/comments to end of token. + local function build_token (tag, content) + assert (tag and content) + local i, first_line, first_column_offset, previous_line_length = + previous_i, self.line, self.column_offset, nil + + -- update self.line and first_line. i := indexes of '\n' chars + while true do + i = self.src:match ("\n()", i, true) + --PATCHED:LuaInspect: above line was not counting line numbers + -- correctly when first character of file was a \n. 
+ if not i or i>self.i then break end -- no more '\n' until end of token + previous_line_length = i - self.column_offset + if loc and i <= loc then -- '\n' before beginning of token + first_column_offset = i + first_line = first_line+1 + end + self.line = self.line+1 + self.column_offset = i + end + + -- lineinfo entries: [1]=line, [2]=column, [3]=char, [4]=filename + local fli = { first_line, loc-first_column_offset, loc, self.src_name } + local lli = { self.line, self.i-self.column_offset-1, self.i-1, self.src_name } + --Pluto barfes when the metatable is set:( + setmetatable(fli, lexer.lineinfo_metatable) + setmetatable(lli, lexer.lineinfo_metatable) + local a = { tag = tag, lineinfo = { first=fli, last=lli }, content } + if lli[2]==-1 then lli[1], lli[2] = lli[1]-1, previous_line_length-1 end + if #self.attached_comments > 0 then + a.lineinfo.comments = self.attached_comments + fli.comments = self.attached_comments + if self.lineinfo_last then + self.lineinfo_last.comments = self.attached_comments + end + end + self.attached_comments = { } + return setmetatable (a, self.token_metatable) + end -- + + for ext_idx, extractor in ipairs(self.extractors) do + -- printf("method = %s", method) + local tag, content = self [extractor] (self) + -- [loc] is placed just after the leading whitespaces and comments; + -- for this to work, the whitespace extractor *must be* at index 1. + if ext_idx==1 then loc = self.i end + + if tag then + --printf("`%s{ %q }\t%i", tag, content, loc); + return build_token (tag, content) + end + end + + error "None of the lexer extractors returned anything!" 
+end + +---------------------------------------------------------------------- +-- skip whites and comments +-- FIXME: doesn't take into account: +-- - unterminated long comments +-- - short comments at last line without a final \n +---------------------------------------------------------------------- +function lexer:skip_whitespaces_and_comments() + local table_insert = _G.table.insert + repeat -- loop as long as a space or comment chunk is found + local _, j + local again = false + local last_comment_content = nil + -- skip spaces + self.i = self.src:match (self.patterns.spaces, self.i) + -- skip a long comment if any + _, last_comment_content, j = + self.src :match (self.patterns.long_comment, self.i) + if j then + table_insert(self.attached_comments, + {last_comment_content, self.i, j, "long"}) + self.i=j; again=true + end + -- skip a short comment if any + last_comment_content, j = self.src:match (self.patterns.short_comment, self.i) + if j then + table_insert(self.attached_comments, + {last_comment_content, self.i, j, "short"}) + self.i=j; again=true + end + if self.i>#self.src then return "Eof", "eof" end + until not again + + if self.src:match (self.patterns.final_short_comment, self.i) then + return "Eof", "eof" end + --assert (not self.src:match(self.patterns.short_comment, self.i)) + --assert (not self.src:match(self.patterns.long_comment, self.i)) + -- --assert (not self.src:match(self.patterns.spaces, self.i)) + return +end + +---------------------------------------------------------------------- +-- extract a '...' or "..." 
short string +---------------------------------------------------------------------- +function lexer:extract_short_string() + -- [k] is the first unread char, [self.i] points to [k] in [self.src] + local j, k = self.i, self.src :sub (self.i,self.i) + if k~="'" and k~='"' then return end + local i = self.i + 1 + local j = i + while true do + -- k = opening char: either simple-quote or double-quote + -- i = index of beginning-of-string + -- x = next "interesting" character + -- j = position after interesting char + -- y = char just after x + local x, y + x, j, y = self.src :match ("([\\\r\n"..k.."])()(.?)", j) + if x == '\\' then j=j+1 -- don't parse escaped char + elseif x == k then break -- unescaped end of string + else -- eof or '\r' or '\n' reached before end of string + assert (not x or x=="\r" or x=="\n") + error "Unterminated string" + end + end + self.i = j + + return "String", unescape_string (self.src:sub (i,j-2)) +end + +---------------------------------------------------------------------- +-- +---------------------------------------------------------------------- +function lexer:extract_word() + -- Id / keyword + local word, j = self.src:match (self.patterns.word, self.i) + if word then + self.i = j + if self.alpha [word] then return "Keyword", word + else return "Id", word end + end +end + +---------------------------------------------------------------------- +-- +---------------------------------------------------------------------- +function lexer:extract_number() + -- Number + local j = self.src:match(self.patterns.number_hex, self.i) + if not j then + j = self.src:match (self.patterns.number_mantissa[1], self.i) or + self.src:match (self.patterns.number_mantissa[2], self.i) + if j then + j = self.src:match (self.patterns.number_exponant, j) or j; + end + end + if not j then return end + -- Number found, interpret with tonumber() and return it + local n = tonumber (self.src:sub (self.i, j-1)) + self.i = j + return "Number", n +end + 
+---------------------------------------------------------------------- +-- +---------------------------------------------------------------------- +function lexer:extract_long_string() + -- Long string + local _, content, j = self.src:match (self.patterns.long_string, self.i) + if j then self.i = j; return "String", content end +end + +---------------------------------------------------------------------- +-- +---------------------------------------------------------------------- +function lexer:extract_symbol() + -- compound symbol + local k = self.src:sub (self.i,self.i) + local symk = self.sym [k] + if not symk then + self.i = self.i + 1 + return "Keyword", k + end + for _, sym in pairs (symk) do + if sym == self.src:sub (self.i, self.i + #sym - 1) then + self.i = self.i + #sym; + return "Keyword", sym + end + end + -- single char symbol + self.i = self.i+1 + return "Keyword", k +end + +---------------------------------------------------------------------- +-- Add a keyword to the list of keywords recognized by the lexer. +---------------------------------------------------------------------- +function lexer:add (w, ...) + assert(not ..., "lexer:add() takes only one arg, although possibly a table") + if type (w) == "table" then + for _, x in ipairs (w) do self:add (x) end + else + if w:match (self.patterns.word .. "$") then self.alpha [w] = true + elseif w:match "^%p%p+$" then + local k = w:sub(1,1) + local list = self.sym [k] + if not list then list = { }; self.sym [k] = list end + _G.table.insert (list, w) + elseif w:match "^%p$" then return + else error "Invalid keyword" end + end +end + +---------------------------------------------------------------------- +-- Return the [n]th next token, without consumming it. +-- [n] defaults to 1. If it goes pass the end of the stream, an EOF +-- token is returned. 
+---------------------------------------------------------------------- +function lexer:peek (n) + if not n then n=1 end + if n > #self.peeked then + for i = #self.peeked+1, n do + self.peeked [i] = self:extract() + end + end + return self.peeked [n] +end + +---------------------------------------------------------------------- +-- Return the [n]th next token, removing it as well as the 0..n-1 +-- previous tokens. [n] defaults to 1. If it goes pass the end of the +-- stream, nil is returned. +---------------------------------------------------------------------- +function lexer:next (n) + n = n or 1 + self:peek (n) + local a + for i=1,n do + a = _G.table.remove (self.peeked, 1) + if a then + --debugf ("lexer:next() ==> %s %s", + -- table.tostring(a), tostring(a)) + end + self.lastline = a.lineinfo.last[1] + end + self.lineinfo_last = a.lineinfo.last + return a + --PATCHED:LuaInspect: eof_token was undefined (nil). +end + +---------------------------------------------------------------------- +-- Returns an object which saves the stream's current state. +---------------------------------------------------------------------- +-- FIXME there are more fields than that to save +function lexer:save () return { self.i; _G.table.cat(self.peeked) } end + +---------------------------------------------------------------------- +-- Restore the stream's state, as saved by method [save]. 
+---------------------------------------------------------------------- +-- FIXME there are more fields than that to restore +function lexer:restore (s) self.i=s[1]; self.peeked=s[2] end + +---------------------------------------------------------------------- +-- Resynchronize: cancel any token in self.peeked, by emptying the +-- list and resetting the indexes +---------------------------------------------------------------------- +function lexer:sync() + local p1 = self.peeked[1] + if p1 then + li = p1.lineinfo.first + self.line, self.i = li[1], li[3] + self.column_offset = self.i - li[2] + self.peeked = { } + self.attached_comments = p1.lineinfo.first.comments or { } + end +end + +---------------------------------------------------------------------- +-- Take the source and offset of an old lexer. +---------------------------------------------------------------------- +function lexer:takeover(old) + self:sync() + self.line, self.column_offset, self.i, self.src, self.attached_comments = + old.line, old.column_offset, old.i, old.src, old.attached_comments + return self +end + +-- function lexer:lineinfo() +-- if self.peeked[1] then return self.peeked[1].lineinfo.first +-- else return { self.line, self.i-self.column_offset, self.i } end +-- end + + +---------------------------------------------------------------------- +-- Return the current position in the sources. This position is between +-- two tokens, and can be within a space / comment area, and therefore +-- have a non-null width. :lineinfo_left() returns the beginning of the +-- separation area, :lineinfo_right() returns the end of that area. 
+-- +-- ____ last consummed token ____ first unconsummed token +-- / / +-- XXXXX YYYYY +-- \____ \____ +-- :lineinfo_left() :lineinfo_right() +---------------------------------------------------------------------- +function lexer:lineinfo_right() + return self:peek(1).lineinfo.first +end + +function lexer:lineinfo_left() + return self.lineinfo_last +end + +---------------------------------------------------------------------- +-- Create a new lexstream. +---------------------------------------------------------------------- +function lexer:newstream (src_or_stream, name) + name = name or "?" + if type(src_or_stream)=='table' then -- it's a stream + return setmetatable ({ }, self) :takeover (src_or_stream) + elseif type(src_or_stream)=='string' then -- it's a source string + local src = src_or_stream + local stream = { + src_name = name; -- Name of the file + src = src; -- The source, as a single string + peeked = { }; -- Already peeked, but not discarded yet, tokens + i = 1; -- Character offset in src + line = 1; -- Current line number + column_offset = 0; -- distance from beginning of file to last '\n' + attached_comments = { },-- comments accumulator + lineinfo_last = { 1, 1, 1, name } + } + setmetatable (stream, self) + + -- skip initial sharp-bang for unix scripts + -- FIXME: redundant with mlp.chunk() + if src and src :match "^#" then stream.i = src :find "\n" + 1 end + return stream + else + assert(false, ":newstream() takes a source string or a stream, not a ".. + type(src_or_stream)) + end +end + +---------------------------------------------------------------------- +-- if there's no ... args, return the token a (whose truth value is +-- true) if it's a `Keyword{ }, or nil. If there are ... args, they +-- have to be strings. if the token a is a keyword, and it's content +-- is one of the ... args, then returns it (it's truth value is +-- true). If no a keyword or not in ..., return nil. 
+---------------------------------------------------------------------- +function lexer:is_keyword (a, ...) + if not a or a.tag ~= "Keyword" then return false end + local words = {...} + if #words == 0 then return a[1] end + for _, w in ipairs (words) do + if w == a[1] then return w end + end + return false +end + +---------------------------------------------------------------------- +-- Cause an error if the next token isn't a keyword whose content +-- is listed among ... args (which have to be strings). +---------------------------------------------------------------------- +function lexer:check (...) + local words = {...} + local a = self:next() + local function err () + error ("Got " .. tostring (a) .. + ", expected one of these keywords : '" .. + _G.table.concat (words,"', '") .. "'") end + + if not a or a.tag ~= "Keyword" then err () end + if #words == 0 then return a[1] end + for _, w in ipairs (words) do + if w == a[1] then return w end + end + err () +end + +---------------------------------------------------------------------- +-- +---------------------------------------------------------------------- +function lexer:clone() + local clone = { + alpha = table.deep_copy(self.alpha), + sym = table.deep_copy(self.sym) } + setmetatable(clone, self) + clone.__index = clone + return clone +end diff --git a/builders/lua-inspect/metalualib/metalua/base.lua b/builders/lua-inspect/metalualib/metalua/base.lua new file mode 100644 index 000000000..1e902726e --- /dev/null +++ b/builders/lua-inspect/metalualib/metalua/base.lua @@ -0,0 +1,107 @@ +---------------------------------------------------------------------- +---------------------------------------------------------------------- +-- +-- Base library extension +-- +---------------------------------------------------------------------- +---------------------------------------------------------------------- + +if not metalua then metalua = {} end --PATCHED.. 
rawset(getfenv(), 'metalua', { }) end +metalua.version = "v-0.5" + +if not rawpairs then + rawpairs, rawipairs, rawtype = pairs, ipairs, type +end + +function pairsmt(x) -- PATCHED:LuaInspect [*] + assert(type(x)=='table', 'pairs() expects a table') + local mt = getmetatable(x) + if mt then + local mtp = mt.__pairs + if mtp then return mtp(x) end + end + return rawpairs(x) +end + +function ipairsmt(x) --PATCHED:LuaInspect [*] + assert(type(x)=='table', 'ipairs() expects a table') + local mt = getmetatable(x) + if mt then + local mti = mt.__ipairs + if mti then return mti(x) end + end + return rawipairs(x) +end +--PATCHED:LuaInspect: [*] For performance, compatibility, +-- and debugging reasons, avoid overriding builtins. + + +--[[ +function type(x) + local mt = getmetatable(x) + if mt then + local mtt = mt.__type + if mtt then return mtt end + end + return rawtype(x) +end +]] + +function min (a, ...) + for n in values{...} do if na then a=n end end + return a +end + +function o (...) + local args = {...} + local function g (...) + local result = {...} + for i=#args, 1, -1 do result = {args[i](unpack(result))} end + return unpack (result) + end + return g +end + +function id (...) return ... end +function const (k) return function () return k end end + +function printf(...) return print(string.format(...)) end +function eprintf(...) 
+ io.stderr:write(string.format(...).."\n") +end + +function ivalues (x) + assert(type(x)=='table', 'ivalues() expects a table') + local i = 1 + local function iterator () + local r = x[i]; i=i+1; return r + end + return iterator +end + + +function values (x) + assert(type(x)=='table', 'values() expects a table') + local function iterator (state) + local it + state.content, it = next(state.list, state.content) + return it + end + return iterator, { list = x } +end + +function keys (x) + assert(type(x)=='table', 'keys() expects a table') + local function iterator (state) + local it = next(state.list, state.content) + state.content = it + return it + end + return iterator, { list = x } +end + diff --git a/builders/lua-inspect/metalualib/metalua/runtime.lua b/builders/lua-inspect/metalualib/metalua/runtime.lua new file mode 100644 index 000000000..5fb0cbb63 --- /dev/null +++ b/builders/lua-inspect/metalualib/metalua/runtime.lua @@ -0,0 +1,3 @@ +require 'metalua.base' +require 'metalua.table2' +require 'metalua.string2' diff --git a/builders/lua-inspect/metalualib/metalua/string2.lua b/builders/lua-inspect/metalualib/metalua/string2.lua new file mode 100644 index 000000000..60c186d31 --- /dev/null +++ b/builders/lua-inspect/metalualib/metalua/string2.lua @@ -0,0 +1,44 @@ + +---------------------------------------------------------------------- +---------------------------------------------------------------------- +-- +-- String module extension +-- +---------------------------------------------------------------------- +---------------------------------------------------------------------- + +-- Courtesy of lua-users.org +function string.split(str, pat) + local t = {} + local fpat = "(.-)" .. 
pat + local last_end = 1 + local s, e, cap = string.find(str, fpat, 1) + while s do + if s ~= 1 or cap ~= "" then + table.insert(t,cap) + end + last_end = e+1 + s, e, cap = string.find(str, fpat, last_end) + end + if last_end <= string.len(str) then + cap = string.sub(str, last_end) + table.insert(t, cap) + end + return t +end + +-- "match" is regularly used as a keyword for pattern matching, +-- so here is an always available substitute. +string.strmatch = string["match"] + +-- change a compiled string into a function +function string.undump(str) + if str:strmatch '^\027LuaQ' or str:strmatch '^#![^\n]+\n\027LuaQ' then + local f = (lua_loadstring or loadstring)(str) + return f + else + error "Not a chunk dump" + end +end + +return string \ No newline at end of file diff --git a/builders/lua-inspect/metalualib/metalua/table2.lua b/builders/lua-inspect/metalualib/metalua/table2.lua new file mode 100644 index 000000000..b4962cac1 --- /dev/null +++ b/builders/lua-inspect/metalualib/metalua/table2.lua @@ -0,0 +1,372 @@ +--------------------------------------------------------------------- +---------------------------------------------------------------------- +-- +-- Table module extension +-- +---------------------------------------------------------------------- +---------------------------------------------------------------------- + +-- todo: table.scan (scan1?) fold1? flip? + +function table.transpose(t) + local tt = { } + for a, b in pairs(t) do tt[b] = a end + return tt +end + +function table.iforeach(f, ...) + -- assert (type (f) == "function") [wouldn't allow metamethod __call] + local nargs = select("#", ...) + if nargs==1 then -- Quick iforeach (most common case), just one table arg + local t = ... 
+ assert (type (t) == "table") + for i = 1, #t do + local result = f (t[i]) + -- If the function returns non-false, stop iteration + if result then return result end + end + else -- advanced case: boundaries and/or multiple tables + -- 1 - find boundaries if any + local args, fargs, first, last, arg1 = {...}, { } + if type(args[1]) ~= "number" then first, arg1 = 1, 1 + elseif type(args[2]) ~= "number" then first, last, arg1 = 1, args[1], 2 + else first, last, i = args[1], args[2], 3 end + assert (nargs > arg1) + -- 2 - determine upper boundary if not given + if not last then for i = arg1, nargs do + assert (type (args[i]) == "table") + last = max (#args[i], last) + end end + -- 3 - perform the iteration + for i = first, last do + for j = arg1, nargs do fargs[j] = args[j][i] end -- build args list + local result = f (unpack (fargs)) -- here is the call + -- If the function returns non-false, stop iteration + if result then return result end + end + end +end + +function table.imap (f, ...) + local result, idx = { }, 1 + local function g(...) result[idx] = f(...); idx=idx+1 end + table.iforeach(g, ...) + return result +end + +function table.ifold (f, acc, ...) + local function g(...) acc = f (acc,...) end + table.iforeach (g, ...) + return acc +end + +-- function table.ifold1 (f, ...) +-- return table.ifold (f, acc, 2, false, ...) +-- end + +function table.izip(...) + local function g(...) return {...} end + return table.imap(g, ...) +end + +function table.ifilter(f, t) + local yes, no = { }, { } + for i=1,#t do table.insert (f(t[i]) and yes or no, t[i]) end + return yes, no +end + +function table.icat(...) + local result = { } + for t in values {...} do + for x in values (t) do + table.insert (result, x) + end + end + return result +end + +function table.iflatten (x) return table.icat (unpack (x)) end + +function table.irev (t) + local result, nt = { }, #t + for i=0, nt-1 do result[nt-i] = t[i+1] end + return result +end + +function table.isub (t, ...) 
+ local ti, u = table.insert, { } + local args, nargs = {...}, select("#", ...) + for i=1, nargs/2 do + local a, b = args[2*i-1], args[2*i] + for i=a, b, a<=b and 1 or -1 do ti(u, t[i]) end + end + return u +end + +function table.iall (f, ...) + local result = true + local function g(...) return not f(...) end + return not table.iforeach(g, ...) + --return result +end + +function table.iany (f, ...) + local function g(...) return not f(...) end + return not table.iall(g, ...) +end + +function table.shallow_copy(x) + local y={ } + for k, v in pairs(x) do y[k]=v end + return y +end + +-- Warning, this is implementation dependent: it relies on +-- the fact the [next()] enumerates the array-part before the hash-part. +function table.cat(...) + local y={ } + for x in values{...} do + -- cat array-part + for _, v in ipairs(x) do table.insert(y,v) end + -- cat hash-part + local lx, k = #x + if lx>0 then k=next(x,lx) else k=next(x) end + while k do y[k]=x[k]; k=next(x,k) end + end + return y +end + +function table.deep_copy(x) + local tracker = { } + local function aux (x) + if type(x) == "table" then + local y=tracker[x] + if y then return y end + y = { }; tracker[x] = y + setmetatable (y, getmetatable (x)) + for k,v in pairs(x) do y[aux(k)] = aux(v) end + return y + else return x end + end + return aux(x) +end + +function table.override(dst, src) + for k, v in pairs(src) do dst[k] = v end + for i = #src+1, #dst do dst[i] = nil end + return dst +end + + +function table.range(a,b,c) + if not b then assert(not(c)); b=a; a=1 + elseif not c then c = (b>=a) and 1 or -1 end + local result = { } + for i=a, b, c do table.insert(result, i) end + return result +end + +-- FIXME: new_indent seems to be always nil?! +-- FIXME: accumulator function should be configurable, +-- so that print() doesn't need to bufferize the whole string +-- before starting to print. +function table.tostring(t, ...) 
+ local PRINT_HASH, HANDLE_TAG, FIX_INDENT, LINE_MAX, INITIAL_INDENT = true, true + for _, x in ipairs {...} do + if type(x) == "number" then + if not LINE_MAX then LINE_MAX = x + else INITIAL_INDENT = x end + elseif x=="nohash" then PRINT_HASH = false + elseif x=="notag" then HANDLE_TAG = false + else + local n = string['match'](x, "^indent%s*(%d*)$") + if n then FIX_INDENT = tonumber(n) or 3 end + end + end + LINE_MAX = LINE_MAX or math.huge + INITIAL_INDENT = INITIAL_INDENT or 1 + + local current_offset = 0 -- indentation level + local xlen_cache = { } -- cached results for xlen() + local acc_list = { } -- Generated bits of string + local function acc(...) -- Accumulate a bit of string + local x = table.concat{...} + current_offset = current_offset + #x + table.insert(acc_list, x) + end + local function valid_id(x) + -- FIXME: we should also reject keywords; but the list of + -- current keywords is not fixed in metalua... + return type(x) == "string" + and string['match'](x, "^[a-zA-Z_][a-zA-Z0-9_]*$") + end + + -- Compute the number of chars it would require to display the table + -- on a single line. Helps to decide whether some carriage returns are + -- required. Since the size of each sub-table is required many times, + -- it's cached in [xlen_cache]. + local xlen_type = { } + local function xlen(x, nested) + nested = nested or { } + if x==nil then return #"nil" end + --if nested[x] then return #tostring(x) end -- already done in table + local len = xlen_cache[x] + if len then return len end + local f = xlen_type[type(x)] + if not f then return #tostring(x) end + len = f (x, nested) + xlen_cache[x] = len + return len + end + + -- optim: no need to compute lengths if I'm not going to use them + -- anyway. 
+ if LINE_MAX == math.huge then xlen = function() return 0 end end + + xlen_type["nil"] = function () return 3 end + function xlen_type.number (x) return #tostring(x) end + function xlen_type.boolean (x) return x and 4 or 5 end + function xlen_type.string (x) return #string.format("%q",x) end + function xlen_type.table (adt, nested) + + -- Circular references detection + if nested [adt] then return #tostring(adt) end + nested [adt] = true + + local has_tag = HANDLE_TAG and valid_id(adt.tag) + local alen = #adt + local has_arr = alen>0 + local has_hash = false + local x = 0 + + if PRINT_HASH then + -- first pass: count hash-part + for k, v in pairs(adt) do + if k=="tag" and has_tag then + -- this is the tag -> do nothing! + elseif type(k)=="number" and k<=alen and math.fmod(k,1)==0 then + -- array-part pair -> do nothing! + else + has_hash = true + if valid_id(k) then x=x+#k + else x = x + xlen (k, nested) + 2 end -- count surrounding brackets + x = x + xlen (v, nested) + 5 -- count " = " and ", " + end + end + end + + for i = 1, alen do x = x + xlen (adt[i], nested) + 2 end -- count ", " + + nested[adt] = false -- No more nested calls + + if not (has_tag or has_arr or has_hash) then return 3 end + if has_tag then x=x+#adt.tag+1 end + if not (has_arr or has_hash) then return x end + if not has_hash and alen==1 and type(adt[1])~="table" then + return x-2 -- substract extraneous ", " + end + return x+2 -- count "{ " and " }", substract extraneous ", " + end + + -- Recursively print a (sub) table at given indentation level. + -- [newline] indicates whether newlines should be inserted. 
+ local function rec (adt, nested, indent) + if not FIX_INDENT then indent = current_offset end + local function acc_newline() + acc ("\n"); acc (string.rep (" ", indent)) + current_offset = indent + end + local x = { } + x["nil"] = function() acc "nil" end + function x.number() acc (tostring (adt)) end + --function x.string() acc (string.format ("%q", adt)) end + function x.string() acc ((string.format ("%q", adt):gsub("\\\n", "\\n"))) end + function x.boolean() acc (adt and "true" or "false") end + function x.table() + if nested[adt] then acc(tostring(adt)); return end + nested[adt] = true + + + local has_tag = HANDLE_TAG and valid_id(adt.tag) + local alen = #adt + local has_arr = alen>0 + local has_hash = false + + if has_tag then acc("`"); acc(adt.tag) end + + -- First pass: handle hash-part + if PRINT_HASH then + for k, v in pairs(adt) do + -- pass if the key belongs to the array-part or is the "tag" field + if not (k=="tag" and HANDLE_TAG) and + not (type(k)=="number" and k<=alen and math.fmod(k,1)==0) then + + -- Is it the first time we parse a hash pair? + if not has_hash then + acc "{ " + if not FIX_INDENT then indent = current_offset end + else acc ", " end + + -- Determine whether a newline is required + local is_id, expected_len = valid_id(k) + if is_id then expected_len = #k + xlen (v, nested) + #" = , " + else expected_len = xlen (k, nested) + + xlen (v, nested) + #"[] = , " end + if has_hash and expected_len + current_offset > LINE_MAX + then acc_newline() end + + -- Print the key + if is_id then acc(k); acc " = " + else acc "["; rec (k, nested, indent+(FIX_INDENT or 0)); acc "] = " end + + -- Print the value + rec (v, nested, indent+(FIX_INDENT or 0)) + has_hash = true + end + end + end + + -- Now we know whether there's a hash-part, an array-part, and a tag. + -- Tag and hash-part are already printed if they're present. 
+ if not has_tag and not has_hash and not has_arr then acc "{ }"; + elseif has_tag and not has_hash and not has_arr then -- nothing, tag already in acc + else + assert (has_hash or has_arr) + local no_brace = false + if has_hash and has_arr then acc ", " + elseif has_tag and not has_hash and alen==1 and type(adt[1])~="table" then + -- No brace required; don't print "{", remember not to print "}" + acc (" "); rec (adt[1], nested, indent+(FIX_INDENT or 0)) + no_brace = true + elseif not has_hash then + -- Braces required, but not opened by hash-part handler yet + acc "{ " + if not FIX_INDENT then indent = current_offset end + end + + -- 2nd pass: array-part + if not no_brace and has_arr then + rec (adt[1], nested, indent+(FIX_INDENT or 0)) + for i=2, alen do + acc ", "; + if current_offset + xlen (adt[i], { }) > LINE_MAX + then acc_newline() end + rec (adt[i], nested, indent+(FIX_INDENT or 0)) + end + end + if not no_brace then acc " }" end + end + nested[adt] = false -- No more nested calls + end + local y = x[type(adt)] + if y then y() else acc(tostring(adt)) end + end + --printf("INITIAL_INDENT = %i", INITIAL_INDENT) + current_offset = INITIAL_INDENT or 0 + rec(t, { }, 0) + return table.concat (acc_list) +end + +function table.print(...) return print(table.tostring(...)) end + +return table \ No newline at end of file diff --git a/builders/lua-inspect/metalualib/mlp_expr.lua b/builders/lua-inspect/metalualib/mlp_expr.lua new file mode 100644 index 000000000..091f92e2c --- /dev/null +++ b/builders/lua-inspect/metalualib/mlp_expr.lua @@ -0,0 +1,204 @@ +---------------------------------------------------------------------- +-- Metalua: $Id: mlp_expr.lua,v 1.7 2006/11/15 09:07:50 fab13n Exp $ +-- +-- Summary: metalua parser, expression parser. This is part of the +-- definition of module [mlp]. +-- +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006, Fabien Fleutot . 
+-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. +-- +---------------------------------------------------------------------- +-- History: +-- $Log: mlp_expr.lua,v $ +-- Revision 1.7 2006/11/15 09:07:50 fab13n +-- debugged meta operators. +-- Added command line options handling. +-- +-- Revision 1.6 2006/11/10 02:11:17 fab13n +-- compiler faithfulness to 5.1 improved +-- gg.expr extended +-- mlp.expr refactored +-- +-- Revision 1.5 2006/11/09 09:39:57 fab13n +-- some cleanup +-- +-- Revision 1.4 2006/11/07 21:29:02 fab13n +-- improved quasi-quoting +-- +-- Revision 1.3 2006/11/07 04:38:00 fab13n +-- first bootstrapping version. +-- +-- Revision 1.2 2006/11/05 15:08:34 fab13n +-- updated code generation, to be compliant with 5.1 +-- +---------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Exported API: +-- * [mlp.expr()] +-- * [mlp.expr_list()] +-- * [mlp.func_val()] +-- +-------------------------------------------------------------------------------- + +--require "gg" +--require "mlp_misc" +--require "mlp_table" +--require "mlp_meta" + +-------------------------------------------------------------------------------- +-- These function wrappers (eta-expansions ctually) are just here to break +-- some circular dependencies between mlp_xxx.lua files. +-------------------------------------------------------------------------------- +local function _expr (lx) return mlp.expr (lx) end +local function _table_content (lx) return mlp.table_content (lx) end +local function block (lx) return mlp.block (lx) end +local function stat (lx) return mlp.stat (lx) end + +module ("mlp", package.seeall) + +-------------------------------------------------------------------------------- +-- Non-empty expression list. Actually, this isn't used here, but that's +-- handy to give to users. 
+-------------------------------------------------------------------------------- +expr_list = gg.list{ _expr, separators = "," } + +-------------------------------------------------------------------------------- +-- Helpers for function applications / method applications +-------------------------------------------------------------------------------- +func_args_content = gg.list { + name = "function arguments", + _expr, separators = ",", terminators = ")" } + +-- Used to parse methods +method_args = gg.multisequence{ + name = "function argument(s)", + { "{", table_content, "}" }, + { "(", func_args_content, ")", builder = fget(1) }, + default = function(lx) local r = opt_string(lx); return r and {r} or { } end } + +-------------------------------------------------------------------------------- +-- [func_val] parses a function, from opening parameters parenthese to +-- "end" keyword included. Used for anonymous functions as well as +-- function declaration statements (both local and global). +-- +-- It's wrapped in a [_func_val] eta expansion, so that when expr +-- parser uses the latter, they will notice updates of [func_val] +-- definitions. 
+-------------------------------------------------------------------------------- +func_params_content = gg.list{ name="function parameters", + gg.multisequence{ { "...", builder = "Dots" }, default = id }, + separators = ",", terminators = {")", "|"} } + +local _func_params_content = function (lx) return func_params_content(lx) end + +func_val = gg.sequence { name="function body", + "(", func_params_content, ")", block, "end", builder = "Function" } + +local _func_val = function (lx) return func_val(lx) end + +-------------------------------------------------------------------------------- +-- Default parser for primary expressions +-------------------------------------------------------------------------------- +function id_or_literal (lx) + local a = lx:next() + if a.tag~="Id" and a.tag~="String" and a.tag~="Number" then + gg.parse_error (lx, "Unexpected expr token %s", + _G.table.tostring (a, 'nohash')) + end + return a +end + + +-------------------------------------------------------------------------------- +-- Builder generator for operators. Wouldn't be worth it if "|x|" notation +-- were allowed, but then lua 5.1 wouldn't compile it +-------------------------------------------------------------------------------- + +-- opf1 = |op| |_,a| `Op{ op, a } +local function opf1 (op) return + function (_,a) return { tag="Op", op, a } end end + +-- opf2 = |op| |a,_,b| `Op{ op, a, b } +local function opf2 (op) return + function (a,_,b) return { tag="Op", op, a, b } end end + +-- opf2r = |op| |a,_,b| `Op{ op, b, a } -- (args reversed) +local function opf2r (op) return + function (a,_,b) return { tag="Op", op, b, a } end end + +local function op_ne(a, _, b) + -- The first version guarantees to return the same code as Lua, + -- but it relies on the non-standard 'ne' operator, which has been + -- suppressed from the official AST grammar (although still supported + -- in practice by the compiler). 
+ -- return { tag="Op", "ne", a, b } + return { tag="Op", "not", { tag="Op", "eq", a, b, lineinfo= { + first = a.lineinfo.first, last = b.lineinfo.last } } } +end + + +-------------------------------------------------------------------------------- +-- +-- complete expression +-- +-------------------------------------------------------------------------------- + +-- FIXME: set line number. In [expr] transformers probably + +expr = gg.expr { name = "expression", + + primary = gg.multisequence{ name="expr primary", + { "(", _expr, ")", builder = "Paren" }, + { "function", _func_val, builder = fget(1) }, + { "-{", splice_content, "}", builder = fget(1) }, + { "+{", quote_content, "}", builder = fget(1) }, + { "nil", builder = "Nil" }, + { "true", builder = "True" }, + { "false", builder = "False" }, + { "...", builder = "Dots" }, + table, + default = id_or_literal }, + + infix = { name="expr infix op", + { "+", prec = 60, builder = opf2 "add" }, + { "-", prec = 60, builder = opf2 "sub" }, + { "*", prec = 70, builder = opf2 "mul" }, + { "/", prec = 70, builder = opf2 "div" }, + { "%", prec = 70, builder = opf2 "mod" }, + { "^", prec = 90, builder = opf2 "pow", assoc = "right" }, + { "..", prec = 40, builder = opf2 "concat", assoc = "right" }, + { "==", prec = 30, builder = opf2 "eq" }, + { "~=", prec = 30, builder = op_ne }, + { "<", prec = 30, builder = opf2 "lt" }, + { "<=", prec = 30, builder = opf2 "le" }, + { ">", prec = 30, builder = opf2r "lt" }, + { ">=", prec = 30, builder = opf2r "le" }, + { "and",prec = 20, builder = opf2 "and" }, + { "or", prec = 10, builder = opf2 "or" } }, + + prefix = { name="expr prefix op", + { "not", prec = 80, builder = opf1 "not" }, + { "#", prec = 80, builder = opf1 "len" }, + { "-", prec = 80, builder = opf1 "unm" } }, + + suffix = { name="expr suffix op", + { "[", _expr, "]", builder = function (tab, idx) + return {tag="Index", tab, idx[1]} end}, + { ".", id, builder = function (tab, field) + return {tag="Index", tab, 
id2string(field[1])} end }, + { "(", func_args_content, ")", builder = function(f, args) + return {tag="Call", f, unpack(args[1])} end }, + { "{", _table_content, "}", builder = function (f, arg) + return {tag="Call", f, arg[1]} end}, + { ":", id, method_args, builder = function (obj, post) + return {tag="Invoke", obj, id2string(post[1]), unpack(post[2])} end}, + { "+{", quote_content, "}", builder = function (f, arg) + return {tag="Call", f, arg[1] } end }, + default = { name="opt_string_arg", parse = mlp.opt_string, builder = function(f, arg) + return {tag="Call", f, arg } end } } } diff --git a/builders/lua-inspect/metalualib/mlp_ext.lua b/builders/lua-inspect/metalualib/mlp_ext.lua new file mode 100644 index 000000000..af9780318 --- /dev/null +++ b/builders/lua-inspect/metalualib/mlp_ext.lua @@ -0,0 +1,89 @@ +-------------------------------------------------------------------------------- +-- +-- Non-Lua syntax extensions +-- +-------------------------------------------------------------------------------- + +module ("mlp", package.seeall) + +-------------------------------------------------------------------------------- +-- Alebraic Datatypes +-------------------------------------------------------------------------------- +local function adt (lx) + local tagval = id (lx) [1] + local tagkey = {tag="Pair", {tag="String", "tag"}, {tag="String", tagval} } + if lx:peek().tag == "String" or lx:peek().tag == "Number" then + return { tag="Table", tagkey, lx:next() } + elseif lx:is_keyword (lx:peek(), "{") then + local x = table (lx) + _G.table.insert (x, 1, tagkey) + return x + else return { tag="Table", tagkey } end +end + +expr:add{ "`", adt, builder = fget(1) } + +-------------------------------------------------------------------------------- +-- Anonymous lambda +-------------------------------------------------------------------------------- +local lambda_expr = gg.sequence{ + "|", func_params_content, "|", expr, + builder= function (x) + local li = 
x[2].lineinfo + return { tag="Function", x[1], + { {tag="Return", x[2], lineinfo=li }, lineinfo=li } } + end } + +-- In an earlier version, lambda_expr took an expr_list rather than an expr +-- after the 2nd bar. However, it happened to be much more of a burden than an +-- help, So finally I disabled it. If you want to return several results, +-- use the long syntax. +-------------------------------------------------------------------------------- +-- local lambda_expr = gg.sequence{ +-- "|", func_params_content, "|", expr_list, +-- builder= function (x) +-- return {tag="Function", x[1], { {tag="Return", unpack(x[2]) } } } end } + +expr:add (lambda_expr) + +-------------------------------------------------------------------------------- +-- Allows to write "a `f` b" instead of "f(a, b)". Taken from Haskell. +-- This is not part of Lua 5.1 syntax, so it's added to the expression +-- afterwards, so that it's easier to disable. +-------------------------------------------------------------------------------- +local function expr_in_backquotes (lx) return expr(lx, 35) end + +expr.infix:add{ name = "infix function", + "`", expr_in_backquotes, "`", prec = 35, assoc="left", + builder = function(a, op, b) return {tag="Call", op[1], a, b} end } + + +-------------------------------------------------------------------------------- +-- table.override assignment +-------------------------------------------------------------------------------- + +mlp.lexer:add "<-" +stat.assignments["<-"] = function (a, b) + assert( #a==1 and #b==1, "No multi-args for '<-'") + return { tag="Call", { tag="Index", { tag="Id", "table" }, + { tag="String", "override" } }, + a[1], b[1]} +end + +-------------------------------------------------------------------------------- +-- C-style op+assignments +-------------------------------------------------------------------------------- +local function op_assign(kw, op) + local function rhs(a, b) + return { tag="Op", op, a, b } + end + local function 
f(a,b) + return { tag="Set", a, _G.table.imap(rhs, a, b) } + end + mlp.lexer:add (kw) + mlp.stat.assignments[kw] = f +end + +_G.table.iforeach (op_assign, + {"+=", "-=", "*=", "/="}, + {"add", "sub", "mul", "div"}) \ No newline at end of file diff --git a/builders/lua-inspect/metalualib/mlp_lexer.lua b/builders/lua-inspect/metalualib/mlp_lexer.lua new file mode 100644 index 000000000..be290f16d --- /dev/null +++ b/builders/lua-inspect/metalualib/mlp_lexer.lua @@ -0,0 +1,32 @@ +---------------------------------------------------------------------- +-- Metalua: $Id: mll.lua,v 1.3 2006/11/15 09:07:50 fab13n Exp $ +-- +-- Summary: Source file lexer. ~~Currently only works on strings. +-- Some API refactoring is needed. +-- +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006-2007, Fabien Fleutot . +-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. +-- +---------------------------------------------------------------------- + +module ("mlp", package.seeall) + +require "lexer" + +local mlp_lexer = lexer.lexer:clone() + +local keywords = { + "and", "break", "do", "else", "elseif", + "end", "false", "for", "function", "if", + "in", "local", "nil", "not", "or", "repeat", + "return", "then", "true", "until", "while", + "...", "..", "==", ">=", "<=", "~=", + "+{", "-{" } + +for w in values(keywords) do mlp_lexer:add(w) end + +_M.lexer = mlp_lexer diff --git a/builders/lua-inspect/metalualib/mlp_meta.lua b/builders/lua-inspect/metalualib/mlp_meta.lua new file mode 100644 index 000000000..27d476a15 --- /dev/null +++ b/builders/lua-inspect/metalualib/mlp_meta.lua @@ -0,0 +1,118 @@ +---------------------------------------------------------------------- +-- Metalua: $Id: mlp_meta.lua,v 1.4 2006/11/15 09:07:50 fab13n Exp $ +-- +-- Summary: Meta-operations: AST quasi-quoting and splicing +-- +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006, Fabien 
Fleutot . +-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. +-- +---------------------------------------------------------------------- + + +-------------------------------------------------------------------------------- +-- +-- Exported API: +-- * [mlp.splice_content()] +-- * [mlp.quote_content()] +-- +-------------------------------------------------------------------------------- + +module ("mlp", package.seeall) + +-------------------------------------------------------------------------------- +-- External splicing: compile an AST into a chunk, load and evaluate +-- that chunk, and replace the chunk by its result (which must also be +-- an AST). +-------------------------------------------------------------------------------- + +function splice (ast) + local f = mlc.function_of_ast(ast, '=splice') + local result=f() + return result +end + +-------------------------------------------------------------------------------- +-- Going from an AST to an AST representing that AST +-- the only key being lifted in this version is ["tag"] +-------------------------------------------------------------------------------- +function quote (t) + --print("QUOTING:", _G.table.tostring(t, 60)) + local cases = { } + function cases.table (t) + local mt = { tag = "Table" } + --_G.table.insert (mt, { tag = "Pair", quote "quote", { tag = "True" } }) + if t.tag == "Splice" then + assert (#t==1, "Invalid splice") + local sp = t[1] + return sp + elseif t.tag then + _G.table.insert (mt, { tag = "Pair", quote "tag", quote (t.tag) }) + end + for _, v in ipairs (t) do + _G.table.insert (mt, quote(v)) + end + return mt + end + function cases.number (t) return { tag = "Number", t, quote = true } end + function cases.string (t) return { tag = "String", t, quote = true } end + return cases [ type (t) ] (t) +end + +-------------------------------------------------------------------------------- +-- when this variable is false, code inside [-{...}] is 
compiled and +-- avaluated immediately. When it's true (supposedly when we're +-- parsing data inside a quasiquote), [-{foo}] is replaced by +-- [`Splice{foo}], which will be unpacked by [quote()]. +-------------------------------------------------------------------------------- +in_a_quote = false + +-------------------------------------------------------------------------------- +-- Parse the inside of a "-{ ... }" +-------------------------------------------------------------------------------- +function splice_content (lx) + local parser_name = "expr" + if lx:is_keyword (lx:peek(2), ":") then + local a = lx:next() + lx:next() -- skip ":" + assert (a.tag=="Id", "Invalid splice parser name") + parser_name = a[1] + end + local ast = mlp[parser_name](lx) + if in_a_quote then + --printf("SPLICE_IN_QUOTE:\n%s", _G.table.tostring(ast, "nohash", 60)) + return { tag="Splice", ast } + else + if parser_name == "expr" then ast = { { tag="Return", ast } } + elseif parser_name == "stat" then ast = { ast } + elseif parser_name ~= "block" then + error ("splice content must be an expr, stat or block") end + --printf("EXEC THIS SPLICE:\n%s", _G.table.tostring(ast, "nohash", 60)) + return splice (ast) + end +end + +-------------------------------------------------------------------------------- +-- Parse the inside of a "+{ ... 
}" +-------------------------------------------------------------------------------- +function quote_content (lx) + local parser + if lx:is_keyword (lx:peek(2), ":") then -- +{parser: content } + parser = mlp[id(lx)[1]] + lx:next() + else -- +{ content } + parser = mlp.expr + end + + local prev_iq = in_a_quote + in_a_quote = true + --print("IN_A_QUOTE") + local content = parser (lx) + local q_content = quote (content) + in_a_quote = prev_iq + return q_content +end + diff --git a/builders/lua-inspect/metalualib/mlp_misc.lua b/builders/lua-inspect/metalualib/mlp_misc.lua new file mode 100644 index 000000000..c09483d47 --- /dev/null +++ b/builders/lua-inspect/metalualib/mlp_misc.lua @@ -0,0 +1,185 @@ +---------------------------------------------------------------------- +-- Metalua: $Id: mlp_misc.lua,v 1.6 2006/11/15 09:07:50 fab13n Exp $ +-- +-- Summary: metalua parser, miscellaneous utility functions. +-- +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006, Fabien Fleutot . +-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. +-- +---------------------------------------------------------------------- +-- History: +-- $Log: mlp_misc.lua,v $ +-- Revision 1.6 2006/11/15 09:07:50 fab13n +-- debugged meta operators. +-- Added command line options handling. +-- +-- Revision 1.5 2006/11/10 02:11:17 fab13n +-- compiler faithfulness to 5.1 improved +-- gg.expr extended +-- mlp.expr refactored +-- +-- Revision 1.4 2006/11/09 09:39:57 fab13n +-- some cleanup +-- +-- Revision 1.3 2006/11/07 04:38:00 fab13n +-- first bootstrapping version. 
+-- +-- Revision 1.2 2006/11/05 15:08:34 fab13n +-- updated code generation, to be compliant with 5.1 +-- +---------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Exported API: +-- * [mlp.fget()] +-- * [mlp.id()] +-- * [mlp.opt_id()] +-- * [mlp.id_list()] +-- * [mlp.gensym()] +-- * [mlp.string()] +-- * [mlp.opt_string()] +-- * [mlp.id2string()] +-- +-------------------------------------------------------------------------------- + +--require "gg" +--require "mll" + +module ("mlp", package.seeall) + +-------------------------------------------------------------------------------- +-- returns a function that takes the [n]th element of a table. +-- if [tag] is provided, then this element is expected to be a +-- table, and this table receives a "tag" field whose value is +-- set to [tag]. +-- +-- The primary purpose of this is to generate builders for +-- grammar generators. It has little purpose in metalua, as lambda has +-- a lightweight syntax. +-------------------------------------------------------------------------------- + +function fget (n, tag) + assert (type (n) == "number") + if tag then + assert (type (tag) == "string") + return function (x) + assert (type (x[n]) == "table") + return {tag=tag, unpack(x[n])} end + else + return function (x) return x[n] end + end +end + + +-------------------------------------------------------------------------------- +-- Try to read an identifier (possibly as a splice), or return [false] if no +-- id is found. 
+-------------------------------------------------------------------------------- +function opt_id (lx) + local a = lx:peek(); + if lx:is_keyword (a, "-{") then + local v = gg.sequence{ "-{", splice_content, "}" } (lx) [1] + if v.tag ~= "Id" and v.tag ~= "Splice" then + gg.parse_error(lx,"Bad id splice") + end + return v + elseif a.tag == "Id" then return lx:next() + else return false end +end + +-------------------------------------------------------------------------------- +-- Mandatory reading of an id: causes an error if it can't read one. +-------------------------------------------------------------------------------- +function id (lx) + return opt_id (lx) or gg.parse_error(lx,"Identifier expected") +end + +-------------------------------------------------------------------------------- +-- Common helper function +-------------------------------------------------------------------------------- +id_list = gg.list { primary = mlp.id, separators = "," } + +-------------------------------------------------------------------------------- +-- Symbol generator: [gensym()] returns a guaranteed-to-be-unique identifier. +-- The main purpose is to avoid variable capture in macros. +-- +-- If a string is passed as an argument, theis string will be part of the +-- id name (helpful for macro debugging) +-------------------------------------------------------------------------------- +local gensymidx = 0 + +function gensym (arg) + gensymidx = gensymidx + 1 + return { tag="Id", _G.string.format(".%i.%s", gensymidx, arg or "")} +end + +-------------------------------------------------------------------------------- +-- Converts an identifier into a string. Hopefully one day it'll handle +-- splices gracefully, but that proves quite tricky. 
+-------------------------------------------------------------------------------- +function id2string (id) + --print("id2string:", disp.ast(id)) + if id.tag == "Id" then id.tag = "String"; return id + elseif id.tag == "Splice" then + assert (in_a_quote, "can't do id2string on an outermost splice") + error ("id2string on splice not implemented") + -- Evaluating id[1] will produce `Id{ xxx }, + -- and we want it to produce `String{ xxx } + -- Morally, this is what I want: + -- return `String{ `Index{ `Splice{ id[1] }, `Number 1 } } + -- That is, without sugar: + return {tag="String", {tag="Index", {tag="Splice", id[1] }, + {tag="Number", 1 } } } + else error ("Identifier expected: "..table.tostring(id)) end +end + +-------------------------------------------------------------------------------- +-- Read a string, possibly spliced, or return an error if it can't +-------------------------------------------------------------------------------- +function string (lx) + local a = lx:peek() + if lx:is_keyword (a, "-{") then + local v = gg.sequence{ "-{", splice_content, "}" } (lx) [1] + if v.tag ~= "" and v.tag ~= "Splice" then + gg.parse_error(lx,"Bad string splice") + end + return v + elseif a.tag == "String" then return lx:next() + else error "String expected" end +end + +-------------------------------------------------------------------------------- +-- Try to read a string, or return false if it can't. No splice allowed. 
+-------------------------------------------------------------------------------- +function opt_string (lx) + return lx:peek().tag == "String" and lx:next() +end + +-------------------------------------------------------------------------------- +-- Chunk reader: block + Eof +-------------------------------------------------------------------------------- +function skip_initial_sharp_comment (lx) + -- Dirty hack: I'm happily fondling lexer's private parts + -- FIXME: redundant with lexer:newstream() + lx :sync() + local i = lx.src:match ("^#.-\n()", lx.i) + if i then lx.i, lx.column_offset, lx.line = i, i, lx.line+1 end +end + +local function _chunk (lx) + if lx:peek().tag == 'Eof' then return { } -- handle empty files + else + skip_initial_sharp_comment (lx) + local chunk = block (lx) + if lx:peek().tag ~= "Eof" then error "End-of-file expected" end + return chunk + end +end + +-- chunk is wrapped in a sequence so that it has a "transformer" field. +chunk = gg.sequence { _chunk, builder = unpack } \ No newline at end of file diff --git a/builders/lua-inspect/metalualib/mlp_stat.lua b/builders/lua-inspect/metalualib/mlp_stat.lua new file mode 100644 index 000000000..0407165ff --- /dev/null +++ b/builders/lua-inspect/metalualib/mlp_stat.lua @@ -0,0 +1,221 @@ +---------------------------------------------------------------------- +-- Metalua: $Id: mlp_stat.lua,v 1.7 2006/11/15 09:07:50 fab13n Exp $ +-- +-- Summary: metalua parser, statement/block parser. This is part of +-- the definition of module [mlp]. +-- +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006, Fabien Fleutot . +-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. 
+-- +---------------------------------------------------------------------- +-- +---------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Exports API: +-- * [mlp.stat()] +-- * [mlp.block()] +-- * [mlp.for_header()] +-- +-------------------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- eta-expansions to break circular dependency +-------------------------------------------------------------------------------- +local expr = function (lx) return mlp.expr (lx) end +local func_val = function (lx) return mlp.func_val (lx) end +local expr_list = function (lx) return mlp.expr_list(lx) end + +module ("mlp", package.seeall) + +-------------------------------------------------------------------------------- +-- List of all keywords that indicate the end of a statement block. Users are +-- likely to extend this list when designing extensions. +-------------------------------------------------------------------------------- + + +local block_terminators = { "else", "elseif", "end", "until", ")", "}", "]" } + +-- FIXME: this must be handled from within GG!!! 
+function block_terminators:add(x) + if type (x) == "table" then for _, y in ipairs(x) do self:add (y) end + else _G.table.insert (self, x) end +end + +-------------------------------------------------------------------------------- +-- list of statements, possibly followed by semicolons +-------------------------------------------------------------------------------- +block = gg.list { + name = "statements block", + terminators = block_terminators, + primary = function (lx) + -- FIXME use gg.optkeyword() + local x = stat (lx) + if lx:is_keyword (lx:peek(), ";") then lx:next() end + return x + end } + +-------------------------------------------------------------------------------- +-- Helper function for "return " parsing. +-- Called when parsing return statements. +-- The specific test for initial ";" is because it's not a block terminator, +-- so without itgg.list would choke on "return ;" statements. +-- We don't make a modified copy of block_terminators because this list +-- is sometimes modified at runtime, and the return parser would get out of +-- sync if it was relying on a copy. +-------------------------------------------------------------------------------- +local return_expr_list_parser = gg.multisequence{ + { ";" , builder = function() return { } end }, + default = gg.list { + expr, separators = ",", terminators = block_terminators } } + +-------------------------------------------------------------------------------- +-- for header, between [for] and [do] (exclusive). +-- Return the `Forxxx{...} AST, without the body element (the last one). 
+-------------------------------------------------------------------------------- +function for_header (lx) + local var = mlp.id (lx) + if lx:is_keyword (lx:peek(), "=") then + -- Fornum: only 1 variable + lx:next() -- skip "=" + local e = expr_list (lx) + assert (2 <= #e and #e <= 3, "2 or 3 values in a fornum") + return { tag="Fornum", var, unpack (e) } + else + -- Forin: there might be several vars + local a = lx:is_keyword (lx:next(), ",", "in") + if a=="in" then var_list = { var, lineinfo = var.lineinfo } else + -- several vars; first "," skipped, read other vars + var_list = gg.list{ + primary = id, separators = ",", terminators = "in" } (lx) + _G.table.insert (var_list, 1, var) -- put back the first variable + var_list.lineinfo.first = var.lineinfo.first + --PATCHED:LuaInspect:correct lineinfo, e.g. `for a,b in f do end` + lx:next() -- skip "in" + end + local e = expr_list (lx) + return { tag="Forin", var_list, e } + end +end + +-------------------------------------------------------------------------------- +-- Function def parser helper: id ( . id ) * +-------------------------------------------------------------------------------- +local function fn_builder (list) + local r = list[1] + for i = 2, #list do r = { tag="Index", r, id2string(list[i]), + lineinfo={first=list[1].lineinfo.first, last=list[i].lineinfo.last} } end + --PATCHED:LuaInspect:added lineinfo to above line. e.g. `function a.b.c() end` + return r +end +local func_name = gg.list{ id, separators = ".", builder = fn_builder } + +-------------------------------------------------------------------------------- +-- Function def parser helper: ( : id )? 
+-------------------------------------------------------------------------------- +local method_name = gg.onkeyword{ name = "method invocation", ":", id, + transformers = { function(x) return x and id2string(x) end } } + +-------------------------------------------------------------------------------- +-- Function def builder +-------------------------------------------------------------------------------- +local function funcdef_builder(x) + local name, method, func = x[1], x[2], x[3] + if method then + name = { tag="Index", name, method, lineinfo = { + first = name.lineinfo.first, + last = method.lineinfo.last } } + _G.table.insert (func[1], 1, {tag="Id", "self"}) + end + local r = { tag="Set", {name}, {func} } + r[1].lineinfo = name.lineinfo + r[2].lineinfo = func.lineinfo + return r +end + + +-------------------------------------------------------------------------------- +-- if statement builder +-------------------------------------------------------------------------------- +local function if_builder (x) + local cb_pairs, else_block, r = x[1], x[2], {tag="If"} + for i=1,#cb_pairs do r[2*i-1]=cb_pairs[i][1]; r[2*i]=cb_pairs[i][2] end + if else_block then r[#r+1] = else_block end + return r +end + +-------------------------------------------------------------------------------- +-- produce a list of (expr,block) pairs +-------------------------------------------------------------------------------- +local elseifs_parser = gg.list { + gg.sequence { expr, "then", block }, + separators = "elseif", + terminators = { "else", "end" } } + +-------------------------------------------------------------------------------- +-- assignments and calls: statements that don't start with a keyword +-------------------------------------------------------------------------------- +local function assign_or_call_stat_parser (lx) + local e = expr_list (lx) + local a = lx:is_keyword(lx:peek()) + local op = a and stat.assignments[a] + if op then + --FIXME: check that [e] is a LHS + 
lx:next() + local v = expr_list (lx) + if type(op)=="string" then return { tag=op, e, v } + else return op (e, v) end + else + assert (#e > 0) + if #e > 1 then + gg.parse_error (lx, "comma is not a valid statement separator") end + if e[1].tag ~= "Call" and e[1].tag ~= "Invoke" then + gg.parse_error (lx, "This expression is of type '%s'; ".. + "only function and method calls make valid statements", + e[1].tag or "") + end + return e[1] + end +end + +local_stat_parser = gg.multisequence{ + -- local function + { "function", id, func_val, builder = + function(x) + local vars = { x[1], lineinfo = x[1].lineinfo } + local vals = { x[2], lineinfo = x[2].lineinfo } + return { tag="Localrec", vars, vals } + end }, + -- local ( = )? + default = gg.sequence{ id_list, gg.onkeyword{ "=", expr_list }, + builder = function(x) return {tag="Local", x[1], x[2] or { } } end } } + +-------------------------------------------------------------------------------- +-- statement +-------------------------------------------------------------------------------- +stat = gg.multisequence { + name="statement", + { "do", block, "end", builder = + function (x) return { tag="Do", unpack (x[1]) } end }, + { "for", for_header, "do", block, "end", builder = + function (x) x[1][#x[1]+1] = x[2]; return x[1] end }, + { "function", func_name, method_name, func_val, builder=funcdef_builder }, + { "while", expr, "do", block, "end", builder = "While" }, + { "repeat", block, "until", expr, builder = "Repeat" }, + { "local", local_stat_parser, builder = fget (1) }, + { "return", return_expr_list_parser, builder = fget (1, "Return") }, + { "break", builder = function() return { tag="Break" } end }, + { "-{", splice_content, "}", builder = fget(1) }, + { "if", elseifs_parser, gg.onkeyword{ "else", block }, "end", + builder = if_builder }, + default = assign_or_call_stat_parser } + +stat.assignments = { + ["="] = "Set" } + +function stat.assignments:add(k, v) self[k] = v end diff --git 
a/builders/lua-inspect/metalualib/mlp_table.lua b/builders/lua-inspect/metalualib/mlp_table.lua new file mode 100644 index 000000000..dbaa7846c --- /dev/null +++ b/builders/lua-inspect/metalualib/mlp_table.lua @@ -0,0 +1,92 @@ +---------------------------------------------------------------------- +-- Metalua: $Id: mlp_table.lua,v 1.5 2006/11/10 02:11:17 fab13n Exp $ +-- +-- Summary: metalua parser, table constructor parser. This is part +-- of thedefinition of module [mlp]. +-- +---------------------------------------------------------------------- +-- +-- Copyright (c) 2006, Fabien Fleutot . +-- +-- This software is released under the MIT Licence, see licence.txt +-- for details. +-- +---------------------------------------------------------------------- +-- History: +-- $Log: mlp_table.lua,v $ +-- Revision 1.5 2006/11/10 02:11:17 fab13n +-- compiler faithfulness to 5.1 improved +-- gg.expr extended +-- mlp.expr refactored +-- +-- Revision 1.4 2006/11/09 09:39:57 fab13n +-- some cleanup +-- +-- Revision 1.3 2006/11/07 04:38:00 fab13n +-- first bootstrapping version. 
+-- +-- Revision 1.2 2006/11/05 15:08:34 fab13n +-- updated code generation, to be compliant with 5.1 +-- +---------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Exported API: +-- * [mlp.table_field()] +-- * [mlp.table_content()] +-- * [mlp.table()] +-- +-- KNOWN BUG: doesn't handle final ";" or "," before final "}" +-- +-------------------------------------------------------------------------------- + +--require "gg" +--require "mll" +--require "mlp_misc" + +module ("mlp", package.seeall) + +-------------------------------------------------------------------------------- +-- eta expansion to break circular dependencies: +-------------------------------------------------------------------------------- +local function _expr (lx) return expr(lx) end + +-------------------------------------------------------------------------------- +-- [[key] = value] table field definition +-------------------------------------------------------------------------------- +local bracket_field = gg.sequence{ "[", _expr, "]", "=", _expr, builder = "Pair" } + +-------------------------------------------------------------------------------- +-- [id = value] or [value] table field definition; +-- [[key]=val] are delegated to [bracket_field()] +-------------------------------------------------------------------------------- +function table_field (lx) + if lx:is_keyword (lx:peek(), "[") then return bracket_field (lx) end + local e = _expr (lx) + if lx:is_keyword (lx:peek(), "=") then + lx:next(); -- skip the "=" + local key = id2string(e) + local val = _expr(lx) + local r = { tag="Pair", key, val } + r.lineinfo = { first = key.lineinfo.first, last = val.lineinfo.last } + return r + else return e end +end + +local function _table_field(lx) return table_field(lx) end + +-------------------------------------------------------------------------------- +-- table constructor, without 
enclosing braces; returns a full table object +-------------------------------------------------------------------------------- +table_content = gg.list { _table_field, + separators = { ",", ";" }, terminators = "}", builder = "Table" } + +local function _table_content(lx) return table_content(lx) end + +-------------------------------------------------------------------------------- +-- complete table constructor including [{...}] +-------------------------------------------------------------------------------- +table = gg.sequence{ "{", _table_content, "}", builder = fget(1) } + + diff --git a/builders/lua-inspect/test.lua b/builders/lua-inspect/test.lua new file mode 100755 index 000000000..3571227a9 --- /dev/null +++ b/builders/lua-inspect/test.lua @@ -0,0 +1,7 @@ +#!/usr/bin/env lua + +-- test writing examples.lua to examples.html +arg = {[0]=arg[0], '-fhtml', '-lhtmllib', '-oexamples.html', 'examples.lua'} +dofile 'luainspect' +print 'output written to examples.html' + diff --git a/builders/micropython-docker-build b/builders/micropython-docker-build index 902e289d7..976a3c05d 160000 --- a/builders/micropython-docker-build +++ b/builders/micropython-docker-build @@ -1 +1 @@ -Subproject commit 902e289d719c734c93ee5bc6b647237a97b1f3b6 +Subproject commit 976a3c05d5820f90016b7dd91a9c36c0027570a4 diff --git a/builders/mongoose-docker-build b/builders/mongoose-docker-build index eac5d6100..ae3853ef2 160000 --- a/builders/mongoose-docker-build +++ b/builders/mongoose-docker-build @@ -1 +1 @@ -Subproject commit eac5d610073769cdc305c07f9df0fd86ea61fe67 +Subproject commit ae3853ef2bde2b40c24cd43eb913d57353044e25 diff --git a/builders/nodemcu-docker-build b/builders/nodemcu-docker-build index 01bffbeb6..baf5fe87f 160000 --- a/builders/nodemcu-docker-build +++ b/builders/nodemcu-docker-build @@ -1 +1 @@ -Subproject commit 01bffbeb6cab8778059a710ee40283556dccf88b +Subproject commit baf5fe87f002fd4c8423518875d1d9c72f1a09d1 diff --git a/builders/nodemcu-firmware 
b/builders/nodemcu-firmware deleted file mode 160000 index aa48f20b9..000000000 --- a/builders/nodemcu-firmware +++ /dev/null @@ -1 +0,0 @@ -Subproject commit aa48f20b969d9d4a7ab971b224204fe3630ed5d0 diff --git a/builders/platformio-docker-build b/builders/platformio-docker-build index 8f8c5b33a..aa2da9635 160000 --- a/builders/platformio-docker-build +++ b/builders/platformio-docker-build @@ -1 +1 @@ -Subproject commit 8f8c5b33a1ced986ce4ceb2f7d13e808225fe79e +Subproject commit aa2da9635dcaddd55e7d809c1a7111746924e4a9