dotfiles from arch
@@ -0,0 +1,731 @@
-- C99 grammar written in lpeg.re.
-- Adapted and translated from plain LPeg grammar for C99
-- written by Wesley Smith https://github.com/Flymir/ceg
--
-- Copyright (c) 2009 Wesley Smith
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-- copies of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in
-- all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-- THE SOFTWARE.

-- Reference used in the original and in this implementation:
-- http://www.open-std.org/JTC1/SC22/wg14/www/docs/n1124.pdf

local c99 = {}
|
||||
|
||||
local re = require("parser.relabel")
|
||||
local typed = require("plugins.ffi.c-parser.typed")
|
||||
|
||||
local defs = {}
|
||||
|
||||
|
||||
c99.tracing = false
|
||||
|
||||
defs["trace"] = function(s, i)
|
||||
if c99.tracing then
|
||||
--local location = require("titan-compiler.location")
|
||||
--local line, col = location.get_line_number(s, i)
|
||||
--print("TRACE", line, col, "[[" ..s:sub(i, i+ 256):gsub("\n.*", "") .. "]]")
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
local typedefs = {}
|
||||
|
||||
local function elem(xs, e)
|
||||
for _, x in ipairs(xs) do
|
||||
if e == x then
|
||||
return true
|
||||
end
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
defs["decl_func"] = typed("string, number, table -> boolean, Decl", function(_, _, decl)
|
||||
typed.set_type(decl, "Decl")
|
||||
return true, decl
|
||||
end)
|
||||
|
||||
defs["decl_ids"] = typed("string, number, table -> boolean, Decl?", function(_, _, decl)
|
||||
-- store typedef
|
||||
if elem(decl.spec, "typedef") then
|
||||
if not (decl.ids and decl.ids[1] and decl.ids[1].decl) then
|
||||
return true
|
||||
end
|
||||
for _, id in ipairs(decl.ids) do
|
||||
local name = id.decl.name or id.decl.declarator.name
|
||||
if name then
|
||||
typedefs[name] = true
|
||||
end
|
||||
end
|
||||
end
|
||||
typed.set_type(decl, "Decl")
|
||||
return true, decl
|
||||
end)
|
||||
|
||||
defs["is_typedef"] = function(_, _, id)
|
||||
--print("is " .. id .. " a typedef? " .. tostring(not not typedefs[id]))
|
||||
return typedefs[id], typedefs[id] and id
|
||||
end
|
||||
|
||||
defs["empty_table"] = function()
|
||||
return true, {}
|
||||
end
|
||||
|
||||
-- Flatten nested expression tables
|
||||
defs["nest_exp"] = typed("string, number, {Exp} -> boolean, Exp", function(_, _, exp)
|
||||
typed.set_type(exp, "Exp")
|
||||
if not exp.op then
|
||||
return true, exp[1]
|
||||
end
|
||||
return true, exp
|
||||
end)
|
||||
|
||||
-- Primary expression tables
|
||||
defs["prim_exp"] = typed("string, number, {string} -> boolean, Exp", function(_, _, exp)
|
||||
typed.set_type(exp, "Exp")
|
||||
return true, exp
|
||||
end)
|
||||
|
||||
-- Type tables
|
||||
defs["type_exp"] = typed("string, number, table -> boolean, Exp", function(_, _, exp)
|
||||
typed.check(exp[1], "Type")
|
||||
typed.set_type(exp, "Exp")
|
||||
return true, exp
|
||||
end)
|
||||
|
||||
-- Types
|
||||
defs["type"] = typed("string, number, table -> boolean, Type", function(_, _, typ)
|
||||
typed.set_type(typ, "Type")
|
||||
return true, typ
|
||||
end)
|
||||
|
||||
defs["join"] = typed("string, number, {array} -> boolean, array", function(_, _, xss)
|
||||
-- xss[1] .. xss[2]
|
||||
if xss[2] then
|
||||
table.move(xss[2], 1, #xss[2], #xss[1] + 1, xss[1])
|
||||
end
|
||||
return true, xss[1] or {}
|
||||
end)
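
-- What `join` does, as a plain-Lua sketch (kept as a comment; nothing here
-- runs at load time): table.move copies xss[2][1..n] into xss[1] starting
-- right past its last element, i.e. an in-place append.
--
--     local xs, ys = { "a", "b" }, { "c", "d" }
--     table.move(ys, 1, #ys, #xs + 1, xs)
--     -- xs is now { "a", "b", "c", "d" }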
|
||||
|
||||
defs["postfix"] = typed("string, number, table -> boolean, table", function(_, _, pf)
|
||||
typed.check(pf[1], "Exp")
|
||||
if pf.postfix ~= "" then
|
||||
pf[1].postfix = pf.postfix
|
||||
end
|
||||
return true, pf[1]
|
||||
end)
|
||||
|
||||
defs["litstruct"] = typed("string, number, number -> boolean, string", function(_, _, _)
|
||||
return true, "litstruct"
|
||||
end)
|
||||
|
||||
--==============================================================================
|
||||
-- Lexical Rules (used in both preprocessing and language processing)
|
||||
--==============================================================================
|
||||
|
||||
local lexical_rules = [[--lpeg.re
|
||||
|
||||
TRACE <- ({} => trace)
|
||||
|
||||
empty <- ("" => empty_table)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Identifiers
|
||||
|
||||
IDENTIFIER <- { identifierNondigit (identifierNondigit / [0-9])* } _
|
||||
identifierNondigit <- [a-zA-Z_]
|
||||
/ universalCharacterName
|
||||
|
||||
identifierList <- {| IDENTIFIER ("," _ IDENTIFIER)* |}
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Universal Character Names
|
||||
|
||||
universalCharacterName <- "\u" hexQuad
|
||||
/ "\U" hexQuad hexQuad
|
||||
hexQuad <- hexDigit^4
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- String Literals
|
||||
|
||||
STRING_LITERAL <- { ('"' / 'L"') sChar* '"' } _
|
||||
|
||||
sChar <- (!["\%nl] .) / escapeSequence
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Escape Sequences
|
||||
|
||||
escapeSequence <- simpleEscapeSequence
|
||||
/ octalEscapeSequence
|
||||
/ hexEscapeSequence
|
||||
/ universalCharacterName
|
||||
|
||||
simpleEscapeSequence <- "\" ['"?\abfnrtv]
|
||||
|
||||
octalEscapeSequence <- "\" [0-7] [0-7]^-2
|
||||
|
||||
hexEscapeSequence <- "\x" hexDigit+
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Constants
|
||||
|
||||
INTEGER_CONSTANT <- { ( hexConstant integerSuffix?
|
||||
/ octalConstant integerSuffix?
|
||||
/ decimalConstant integerSuffix?
|
||||
) } _
|
||||
|
||||
decimalConstant <- [1-9] digit*
|
||||
octalConstant <- "0" [0-7]*
|
||||
hexConstant <- ("0x" / "0X") hexDigit+
|
||||
|
||||
digit <- [0-9]
|
||||
hexDigit <- [0-9a-fA-F]
|
||||
|
||||
integerSuffix <- unsignedSuffix longLongSuffix
|
||||
/ unsignedSuffix longSuffix?
|
||||
/ longLongSuffix unsignedSuffix?
|
||||
/ longSuffix unsignedSuffix?
|
||||
|
||||
unsignedSuffix <- [uU]
|
||||
longSuffix <- [lL]
|
||||
longLongSuffix <- "ll" / "LL"
|
||||
|
||||
FLOATING_CONSTANT <- { ( decimalFloatingConstant
|
||||
/ hexFloatingConstant
|
||||
) } _
|
||||
|
||||
decimalFloatingConstant <- fractionalConstant exponentPart? floatingSuffix?
|
||||
/ digit+ exponentPart floatingSuffix?
|
||||
|
||||
hexFloatingConstant <- ("0x" / "0X" ) ( hexFractionalConstant binaryExponentPart floatingSuffix?
|
||||
/ hexDigit+ binaryExponentPart floatingSuffix? )
|
||||
|
||||
fractionalConstant <- digit* "." digit+
|
||||
/ digit "."
|
||||
|
||||
exponentPart <- [eE] [-+]? digit+
|
||||
|
||||
hexFractionalConstant <- hexDigit+? "." hexDigit+
|
||||
/ hexDigit+ "."
|
||||
|
||||
binaryExponentPart <- [pP] digit+
|
||||
|
||||
floatingSuffix <- [flFL]
|
||||
|
||||
CHARACTER_CONSTANT <- { ("'" / "L'") cChar+ "'" } _
|
||||
|
||||
cChar <- (!['\%nl] .) / escapeSequence
|
||||
|
||||
enumerationConstant <- IDENTIFIER
|
||||
|
||||
]]
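
-- A quick refresher on the lpeg.re notation used by these rule sets, as a
-- comment-only sketch: { } captures the matched substring, {| |} collects
-- captures into a table, {:name: :} stores a capture under a named key, and
-- `=> fn` calls a match-time function from `defs`. The sketch assumes the
-- stock `re` module from LPeg is available; this file itself goes through
-- parser.relabel, which accepts the same syntax.
--
--     local re = require("re")
--     local int = re.compile([[
--         integer <- {| {:value: { hex / dec } :} {:suffix: { [uUlL]+ }? :} |} !.
--         hex     <- ("0x" / "0X") [0-9a-fA-F]+
--         dec     <- [0-9]+
--     ]])
--     local t = int:match("0x1FULL")
--     -- t.value == "0x1F", t.suffix == "ULL"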
|
||||
|
||||
local common_expression_rules = [[--lpeg.re
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Common Expression Rules
|
||||
|
||||
multiplicativeExpression <- {| castExpression ({:op: [*/%] :} _ castExpression )* |} => nest_exp
|
||||
additiveExpression <- {| multiplicativeExpression ({:op: [-+] :} _ multiplicativeExpression )* |} => nest_exp
|
||||
shiftExpression <- {| additiveExpression ({:op: ("<<" / ">>") :} _ additiveExpression )* |} => nest_exp
|
||||
relationalExpression <- {| shiftExpression ({:op: (">=" / "<=" / "<" / ">") :} _ shiftExpression )* |} => nest_exp
|
||||
equalityExpression <- {| relationalExpression ({:op: ("==" / "!=") :} _ relationalExpression )* |} => nest_exp
|
||||
bandExpression <- {| equalityExpression ({:op: "&" :} _ equalityExpression )* |} => nest_exp
|
||||
bxorExpression <- {| bandExpression ({:op: "^" :} _ bandExpression )* |} => nest_exp
|
||||
borExpression <- {| bxorExpression ({:op: "|" :} _ bxorExpression )* |} => nest_exp
|
||||
andExpression <- {| borExpression ({:op: "&&" :} _ borExpression )* |} => nest_exp
|
||||
orExpression <- {| andExpression ({:op: "||" :} _ andExpression )* |} => nest_exp
|
||||
conditionalExpression <- {| orExpression ({:op: "?" :} _ expression ":" _ conditionalExpression)? |} => nest_exp
|
||||
|
||||
constantExpression <- conditionalExpression
|
||||
|
||||
]]
|
||||
|
||||
--==============================================================================
|
||||
-- Language Rules (Phrase Structure Grammar)
|
||||
--==============================================================================
|
||||
|
||||
local language_rules = [[--lpeg.re
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- External Definitions
|
||||
|
||||
translationUnit <- %s* {| externalDeclaration* |} "$EOF$"
|
||||
|
||||
externalDeclaration <- functionDefinition
|
||||
/ declaration
|
||||
|
||||
functionDefinition <- {| {:spec: {| declarationSpecifier+ |} :} {:func: declarator :} {:decls: declaration* :} {:code: compoundStatement :} |} => decl_func
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Declarations
|
||||
|
||||
declaration <- {| gccExtensionSpecifier? {:spec: {| declarationSpecifier+ |} :} ({:ids: initDeclarationList :})? gccExtensionSpecifier* ";" _ |} => decl_ids
|
||||
|
||||
declarationSpecifier <- storageClassSpecifier
|
||||
/ typeSpecifier
|
||||
/ typeQualifier
|
||||
/ functionSpecifier
|
||||
|
||||
initDeclarationList <- {| initDeclarator ("," _ initDeclarator)* |}
|
||||
|
||||
initDeclarator <- {| {:decl: declarator :} ("=" _ {:value: initializer :} )? |}
|
||||
|
||||
gccExtensionSpecifier <- "__attribute__" _ "(" _ "(" _ gccAttributeList ")" _ ")" _
|
||||
/ gccAsm
|
||||
/ clangAsm
|
||||
/ "__DARWIN_ALIAS_STARTING_MAC_1060" _ "(" _ clangAsm ")" _
|
||||
/ "__AVAILABILITY_INTERNAL" [a-zA-Z0-9_]+ _ ("(" _ STRING_LITERAL ")" _ )?
|
||||
|
||||
gccAsm <- "__asm__" _ "(" _ (STRING_LITERAL / ":" _ / expression)+ ")" _
|
||||
|
||||
clangAsm <- "__asm" _ "(" _ (STRING_LITERAL / ":" _ / expression)+ ")" _
|
||||
|
||||
gccAttributeList <- {| gccAttributeItem ("," _ gccAttributeItem )* |}
|
||||
|
||||
gccAttributeItem <- clangAsm
|
||||
/ IDENTIFIER ("(" _ (expression ("," _ expression)*)? ")" _)?
|
||||
/ ""
|
||||
|
||||
storageClassSpecifier <- { "typedef" } _
|
||||
/ { "extern" } _
|
||||
/ { "static" } _
|
||||
/ { "auto" } _
|
||||
/ { "register" } _
|
||||
|
||||
typeSpecifier <- typedefName
|
||||
/ { "void" } _
|
||||
/ { "bool" } _
|
||||
/ { "char" } _
|
||||
/ { "short" } _
|
||||
/ { "int" } _
|
||||
/ { "long" } _
|
||||
/ { "float" } _
|
||||
/ { "double" } _
|
||||
/ { "signed" } _
|
||||
/ { "__signed" } _
|
||||
/ { "__signed__" } _
|
||||
/ { "unsigned" } _
|
||||
/ { "ptrdiff_t" } _
|
||||
/ { "size_t" } _
|
||||
/ { "ssize_t" } _
|
||||
/ { "wchar_t" } _
|
||||
/ { "int8_t" } _
|
||||
/ { "int16_t" } _
|
||||
/ { "int32_t" } _
|
||||
/ { "int64_t" } _
|
||||
/ { "uint8_t" } _
|
||||
/ { "uint16_t" } _
|
||||
/ { "uint32_t" } _
|
||||
/ { "uint64_t" } _
|
||||
/ { "intptr_t" } _
|
||||
/ { "uintptr_t" } _
|
||||
/ { "__int8" } _
|
||||
/ { "__int16" } _
|
||||
/ { "__int32" } _
|
||||
/ { "__int64" } _
|
||||
/ { "_Bool" } _
|
||||
/ { "_Complex" } _
|
||||
/ { "complex" } _
|
||||
/ { "__complex" } _
|
||||
/ { "__complex__" } _
|
||||
/ { "__ptr32" } _
|
||||
/ { "__ptr64" } _
|
||||
/ structOrUnionSpecifier
|
||||
/ enumSpecifier
|
||||
|
||||
typeQualifier <- { "const" } _
|
||||
/ { "restrict" } _
|
||||
/ { "volatile" } _
|
||||
|
||||
functionSpecifier <- { "inline" } _
|
||||
|
||||
structOrUnionSpecifier <- {| {:type: structOrUnion :} ({:id: IDENTIFIER :})? "{" _ {:fields: {| structDeclaration+ |} :}? "}" _ |}
|
||||
/ {| {:type: structOrUnion :} {:id: IDENTIFIER :} |}
|
||||
|
||||
structOrUnion <- { "struct" } _
|
||||
/ { "union" } _
|
||||
|
||||
anonymousUnion <- {| {:type: {| {:type: { "union" } :} _ "{" _ {:fields: {| structDeclaration+ |} :}? "}" _ |} :} |} ";" _
|
||||
|
||||
structDeclaration <- anonymousUnion
|
||||
/ {| {:type: {| specifierQualifier+ |} :} {:ids: structDeclaratorList :} |} ";" _
|
||||
|
||||
specifierQualifier <- typeSpecifier
|
||||
/ typeQualifier
|
||||
|
||||
structDeclaratorList <- {| structDeclarator ("," _ structDeclarator)* |}
|
||||
|
||||
structDeclarator <- declarator? ":" _ constantExpression
|
||||
/ declarator
|
||||
|
||||
enumSpecifier <- {| {:type: enum :} ({:id: IDENTIFIER :})? "{" _ {:values: enumeratorList :}? ("," _)? "}" _ |}
|
||||
/ {| {:type: enum :} {:id: IDENTIFIER :} |}
|
||||
|
||||
enum <- { "enum" } _
|
||||
|
||||
enumeratorList <- {| enumerator ("," _ enumerator)* |}
|
||||
|
||||
enumerator <- {| {:id: enumerationConstant :} ("=" _ {:value: constantExpression :})? |}
|
||||
|
||||
declarator <- {| pointer? directDeclarator |}
|
||||
|
||||
directDeclarator <- {:name: IDENTIFIER :} ddRec
|
||||
/ "(" _ {:declarator: declarator :} ")" _ ddRec
|
||||
ddRec <- "[" _ {| {:idx: typeQualifier* assignmentExpression? :} |} "]" _ ddRec
|
||||
/ "[" _ {| {:idx: { "static" } _ typeQualifier* assignmentExpression :} |} "]" _ ddRec
|
||||
/ "[" _ {| {:idx: typeQualifier+ { "static" } _ assignmentExpression :} |} "]" _ ddRec
|
||||
/ "[" _ {| {:idx: typeQualifier* { "*" } _ :} |} "]" _ ddRec
|
||||
/ "(" _ {:params: parameterTypeList / empty :} ")" _ ddRec
|
||||
/ "(" _ {:params: identifierList / empty :} ")" _ ddRec
|
||||
/ ""
|
||||
|
||||
pointer <- {| ({ "*"/"^" } _ typeQualifier*)+ |}
|
||||
|
||||
parameterTypeList <- {| parameterList "," _ {| { "..." } |} _ |} => join
|
||||
/ parameterList
|
||||
|
||||
parameterList <- {| parameterDeclaration ("," _ parameterDeclaration)* |}
|
||||
|
||||
parameterDeclaration <- {| {:param: {| {:type: {| declarationSpecifier+ |} :} {:id: (declarator / abstractDeclarator?) :} |} :} |}
|
||||
|
||||
typeName <- {| specifierQualifier+ abstractDeclarator? |} => type
|
||||
|
||||
abstractDeclarator <- pointer? directAbstractDeclarator
|
||||
/ pointer
|
||||
|
||||
directAbstractDeclarator <- ("(" _ abstractDeclarator ")" _) directAbstractDeclarator2*
|
||||
/ directAbstractDeclarator2+
|
||||
directAbstractDeclarator2 <- "[" _ assignmentExpression? "]" _
|
||||
/ "[" _ "*" _ "]" _
|
||||
/ "(" _ (parameterTypeList / empty) ")" _
|
||||
|
||||
typedefName <- IDENTIFIER => is_typedef
|
||||
|
||||
initializer <- assignmentExpression
|
||||
/ "{" _ initializerList ("," _)? "}" _
|
||||
|
||||
initializerList <- {| initializerList2 ("," _ initializerList2)* |}
|
||||
initializerList2 <- designation? initializer
|
||||
|
||||
designation <- designator+ "=" _
|
||||
|
||||
designator <- "[" _ constantExpression "]" _
|
||||
/ "." _ IDENTIFIER
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Statements
|
||||
|
||||
statement <- labeledStatement
|
||||
/ compoundStatement
|
||||
/ expressionStatement
|
||||
/ selectionStatement
|
||||
/ iterationStatement
|
||||
/ jumpStatement
|
||||
/ gccAsm ";" _
|
||||
|
||||
labeledStatement <- IDENTIFIER ":" _ statement
|
||||
/ "case" _ constantExpression ":" _ statement
|
||||
/ "default" _ ":" _ statement
|
||||
|
||||
compoundStatement <- "{" _ blockItem+ "}" _
|
||||
|
||||
blockItem <- declaration
|
||||
/ statement
|
||||
|
||||
expressionStatement <- expression? ";" _
|
||||
|
||||
selectionStatement <- "if" _ "(" _ expression ")" _ statement "else" _ statement
|
||||
/ "if" _ "(" _ expression ")" _ statement
|
||||
/ "switch" _ "(" _ expression ")" _ statement
|
||||
|
||||
iterationStatement <- "while" _ "(" _ expression ")" _ statement
|
||||
/ "do" _ statement "while" _ "(" _ expression ")" _ ";" _
|
||||
/ "for" _ "(" _ expression? ";" _ expression? ";" _ expression? ")" _ statement
|
||||
/ "for" _ "(" _ declaration expression? ";" _ expression? ")" _ statement
|
||||
|
||||
jumpStatement <- "goto" _ IDENTIFIER ";" _
|
||||
/ "continue" _ ";" _
|
||||
/ "break" _ ";" _
|
||||
/ "return" _ expression? ";" _
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Advanced Language Expression Rules
|
||||
-- (which require type names)
|
||||
|
||||
postfixExpression <- {| {:op: {} => litstruct :} "(" _ {:struct: typeName :} ")" _ "{" _ {:vals: initializerList :} ("," _)? "}" _ peRec |} => nest_exp
|
||||
/ {| primaryExpression {:postfix: peRec :} |} => postfix
|
||||
|
||||
sizeofOrPostfixExpression <- {| {:op: "sizeof" :} _ "(" _ typeName ")" _ |} => type_exp
|
||||
/ {| {:op: "sizeof" :} _ unaryExpression |} => nest_exp
|
||||
/ postfixExpression
|
||||
|
||||
castExpression <- {| "(" _ typeName ")" _ castExpression |} => type_exp
|
||||
/ unaryExpression
|
||||
|
||||
]]
|
||||
|
||||
--==============================================================================
|
||||
-- Language Expression Rules
|
||||
--==============================================================================
|
||||
|
||||
local language_expression_rules = [[--lpeg.re
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Language Expression Rules
|
||||
-- (rules which differ from preprocessing stage)
|
||||
|
||||
expression <- {| assignmentExpression ({:op: "," :} _ assignmentExpression)* |} => nest_exp
|
||||
|
||||
constant <- ( FLOATING_CONSTANT
|
||||
/ INTEGER_CONSTANT
|
||||
/ CHARACTER_CONSTANT
|
||||
/ enumerationConstant
|
||||
)
|
||||
|
||||
primaryExpression <- {| constant |} => prim_exp
|
||||
/ {| IDENTIFIER |} => prim_exp
|
||||
/ {| STRING_LITERAL+ |} => prim_exp
|
||||
/ "(" _ expression ")" _
|
||||
|
||||
peRec <- {| "[" _ {:idx: expression :} "]" _ peRec |}
|
||||
/ {| "(" _ {:args: argumentExpressionList / empty :} ")" _ peRec |}
|
||||
/ {| "." _ {:dot: IDENTIFIER :} peRec |}
|
||||
/ {| "->" _ {:arrow: IDENTIFIER :} peRec |}
|
||||
/ {| "++" _ peRec |}
|
||||
/ {| "--" _ peRec |}
|
||||
/ ""
|
||||
|
||||
argumentExpressionList <- {| assignmentExpression ("," _ assignmentExpression)* |}
|
||||
|
||||
unaryExpression <- {| {:op: prefixOp :} unaryExpression |} => nest_exp
|
||||
/ {| {:op: unaryOperator :} castExpression |} => nest_exp
|
||||
/ sizeofOrPostfixExpression
|
||||
|
||||
prefixOp <- { "++" } _
|
||||
/ { "--" } _
|
||||
|
||||
unaryOperator <- { [-+~!*&] } _
|
||||
|
||||
assignmentExpression <- unaryExpression assignmentOperator assignmentExpression
|
||||
/ conditionalExpression
|
||||
|
||||
assignmentOperator <- "=" _
|
||||
/ "*=" _
|
||||
/ "/=" _
|
||||
/ "%=" _
|
||||
/ "+=" _
|
||||
/ "-=" _
|
||||
/ "<<=" _
|
||||
/ ">>=" _
|
||||
/ "&=" _
|
||||
/ "^=" _
|
||||
/ "|=" _
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Language whitespace
|
||||
|
||||
_ <- %s+
|
||||
S <- %s+
|
||||
|
||||
]]
|
||||
|
||||
local simplified_language_expression_rules = [[--lpeg.re
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Simplified Language Expression Rules
|
||||
-- (versions that do not require knowledge of type names)
|
||||
|
||||
postfixExpression <- {| primaryExpression {:postfix: peRec :} |} => postfix
|
||||
|
||||
sizeofOrPostfixExpression <- postfixExpression
|
||||
|
||||
castExpression <- unaryExpression
|
||||
|
||||
]]
|
||||
|
||||
--==============================================================================
|
||||
-- Preprocessing Rules
|
||||
--==============================================================================
|
||||
|
||||
local preprocessing_rules = [[--lpeg.re
|
||||
|
||||
preprocessingLine <- _ ( "#" _ directive _
|
||||
/ "#" _ preprocessingTokenList? {| _ |} -- non-directive, ignore
|
||||
/ preprocessingTokenList
|
||||
/ empty
|
||||
)
|
||||
|
||||
preprocessingTokenList <- {| (preprocessingToken _)+ |}
|
||||
|
||||
directive <- {| {:directive: "if" :} S {:exp: preprocessingTokenList :} |}
|
||||
/ {| {:directive: "ifdef" :} S {:id: IDENTIFIER :} |}
|
||||
/ {| {:directive: "ifndef" :} S {:id: IDENTIFIER :} |}
|
||||
/ {| {:directive: "elif" :} S {:exp: preprocessingTokenList :} |}
|
||||
/ {| {:directive: "else" :} |}
|
||||
/ {| {:directive: "endif" :} |}
|
||||
/ {| {:directive: "include" :} S {:exp: headerName :} |}
|
||||
/ {| {:directive: "define" :} S {:id: IDENTIFIER :} "(" _ {:args: defineArgList :} _ ")" _ {:repl: replacementList :} |}
|
||||
/ {| {:directive: "define" :} S {:id: IDENTIFIER :} _ {:repl: replacementList :} |}
|
||||
/ {| {:directive: "undef" :} S {:id: IDENTIFIER :} |}
|
||||
/ {| {:directive: "line" :} S {:line: preprocessingTokenList :} |}
|
||||
/ {| {:directive: "error" :} S {:error: preprocessingTokenList / empty :} |}
|
||||
/ {| {:directive: "error" :} |}
|
||||
/ {| {:directive: "pragma" :} S {:pragma: preprocessingTokenList / empty :} |}
|
||||
/ gccDirective
|
||||
/ ""
|
||||
|
||||
gccDirective <- {| {:directive: "include_next" :} S {:exp: headerName :} |}
|
||||
/ {| {:directive: "warning" :} S {:exp: preprocessingTokenList / empty :} |}
|
||||
|
||||
defineArgList <- {| { "..." } |}
|
||||
/ {| identifierList _ "," _ {| { "..." } |} |} => join
|
||||
/ identifierList
|
||||
/ empty
|
||||
|
||||
replacementList <- {| (preprocessingToken _)* |}
|
||||
|
||||
preprocessingToken <- preprocessingNumber
|
||||
/ CHARACTER_CONSTANT
|
||||
/ STRING_LITERAL
|
||||
/ punctuator
|
||||
/ IDENTIFIER
|
||||
|
||||
headerName <- {| {:mode: "<" -> "system" :} { (![%nl>] .)+ } ">" |}
|
||||
/ {| {:mode: '"' -> "quote" :} { (![%nl"] .)+ } '"' |}
|
||||
/ {| IDENTIFIER |} -- macro
|
||||
|
||||
preprocessingNumber <- { ("."? digit) ( digit
|
||||
/ [eEpP] [-+]
|
||||
/ identifierNondigit
|
||||
/ "."
|
||||
)* }
|
||||
|
||||
punctuator <- { digraphs / '...' / '<<=' / '>>=' /
|
||||
'##' / '<<' / '>>' / '->' / '++' / '--' / '&&' / '||' / '<=' / '>=' /
|
||||
'==' / '!=' / '*=' / '/=' / '%=' / '+=' / '-=' / '&=' / '^=' / '|=' /
|
||||
'#' / '[' / ']' / '(' / ')' / '{' / '}' / '.' / '&' /
|
||||
'*' / '+' / '-' / '~' / '!' / '/' / '%' / '<' / '>' /
|
||||
'^' / '|' / '?' / ':' / ';' / ',' / '=' }
|
||||
|
||||
digraphs <- '%:%:' -> "##"
|
||||
/ '%:' -> "#"
|
||||
/ '<:' -> "["
|
||||
/ ':>' -> "]"
|
||||
/ '<%' -> "{"
|
||||
/ '%>' -> "}"
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Preprocessing whitespace
|
||||
|
||||
_ <- %s*
|
||||
S <- %s+
|
||||
|
||||
]]
|
||||
|
||||
--==============================================================================
|
||||
-- Preprocessing Expression Rules
|
||||
--==============================================================================
|
||||
|
||||
local preprocessing_expression_rules = [[--lpeg.re
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Preprocessing Expression Rules
|
||||
-- (rules which differ from language processing stage)
|
||||
|
||||
expression <- constantExpression
|
||||
|
||||
constant <- FLOATING_CONSTANT
|
||||
/ INTEGER_CONSTANT
|
||||
/ CHARACTER_CONSTANT
|
||||
|
||||
primaryExpression <- {| IDENTIFIER |} => prim_exp
|
||||
/ {| constant |} => prim_exp
|
||||
/ "(" _ expression _ ")" _
|
||||
|
||||
postfixExpression <- primaryExpression peRec
|
||||
peRec <- "(" _ (argumentExpressionList / empty) ")" _ peRec
|
||||
/ ""
|
||||
|
||||
argumentExpressionList <- {| expression ("," _ expression )* |}
|
||||
|
||||
unaryExpression <- {| {:op: unaryOperator :} unaryExpression |} => nest_exp
|
||||
/ primaryExpression
|
||||
|
||||
unaryOperator <- { [-+~!] } _
|
||||
/ { "defined" } _
|
||||
|
||||
castExpression <- unaryExpression
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Preprocessing expression whitespace
|
||||
|
||||
_ <- %s*
|
||||
S <- %s+
|
||||
|
||||
]]
|
||||
|
||||
local preprocessing_grammar = re.compile(
|
||||
preprocessing_rules ..
|
||||
lexical_rules, defs)
|
||||
|
||||
local preprocessing_expression_grammar = re.compile(
|
||||
preprocessing_expression_rules ..
|
||||
lexical_rules ..
|
||||
common_expression_rules, defs)
|
||||
|
||||
local language_expression_grammar = re.compile(
|
||||
language_expression_rules ..
|
||||
simplified_language_expression_rules ..
|
||||
lexical_rules ..
|
||||
common_expression_rules, defs)
|
||||
|
||||
local language_grammar = re.compile(
|
||||
language_rules ..
|
||||
language_expression_rules ..
|
||||
lexical_rules ..
|
||||
common_expression_rules, defs)
|
||||
|
||||
local function match(grammar, subject)
|
||||
local res, err, pos = grammar:match(subject)
|
||||
if res == nil then
|
||||
local l, c = re.calcline(subject, pos)
|
||||
local fragment = subject:sub(pos, pos+20)
|
||||
return res, err, l, c, fragment
|
||||
end
|
||||
return res
|
||||
end
|
||||
|
||||
function c99.match_language_grammar(subject)
|
||||
typedefs = {}
|
||||
return match(language_grammar, subject)
|
||||
end
|
||||
|
||||
function c99.match_language_expression_grammar(subject)
|
||||
return match(language_expression_grammar, subject)
|
||||
end
|
||||
|
||||
function c99.match_preprocessing_grammar(subject)
|
||||
return match(preprocessing_grammar, subject)
|
||||
end
|
||||
|
||||
function c99.match_preprocessing_expression_grammar(subject)
|
||||
return match(preprocessing_expression_grammar, subject)
|
||||
end
|
||||
|
||||
return c99
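
-- Hedged usage sketch (kept as a comment so nothing runs at load time). The
-- subject is expected to be the space-separated token stream that cpp.lua
-- emits, terminated by the "$EOF$" marker that cdriver.lua appends; the
-- module path matches the requires used elsewhere in this plugin.
--
--     local c99 = require("plugins.ffi.c-parser.c99")
--     local ast, err, line, col, frag = c99.match_language_grammar("int x = 1 ; $EOF$")
--     if not ast then
--         print(("parse error at %d:%d near %q: %s"):format(line, col, frag, tostring(err)))
--     end
--     -- single expressions go through the expression grammar instead:
--     local exp = c99.match_language_expression_grammar("1 << 8 ")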
@@ -0,0 +1,152 @@
|
||||
|
||||
local cdefines = {}
|
||||
|
||||
local c99 = require("plugins.ffi.c-parser.c99")
|
||||
local cpp = require("plugins.ffi.c-parser.cpp")
|
||||
local typed = require("plugins.ffi.c-parser.typed")
|
||||
|
||||
local function add_type(lst, name, typ)
|
||||
lst[name] = typ
|
||||
table.insert(lst, { name = name, type = typ })
|
||||
end
|
||||
|
||||
local base_c_types = {
|
||||
CONST_CHAR_PTR = { "const", "char", "*" },
|
||||
CONST_CHAR = { "const", "char" },
|
||||
LONG_LONG = { "long", "long" },
|
||||
LONG = { "long" },
|
||||
DOUBLE = { "double" },
|
||||
INT = { "int" },
|
||||
}
|
||||
|
||||
local function get_binop_type(e1, e2)
|
||||
if e1[1] == "double" or e2[1] == "double" then
|
||||
return base_c_types.DOUBLE
|
||||
end
|
||||
if e1[2] == "long" or e2[2] == "long" then
|
||||
return base_c_types.LONG_LONG
|
||||
end
|
||||
if e1[1] == "long" or e2[1] == "long" then
|
||||
return base_c_types.LONG
|
||||
end
|
||||
return base_c_types.INT
|
||||
end
|
||||
|
||||
local binop_set = {
|
||||
["+"] = true,
|
||||
["-"] = true,
|
||||
["*"] = true,
|
||||
["/"] = true,
|
||||
["%"] = true,
|
||||
}
|
||||
|
||||
local relop_set = {
|
||||
["<"] = true,
|
||||
[">"] = true,
|
||||
[">="] = true,
|
||||
["<="] = true,
|
||||
["=="] = true,
|
||||
["!="] = true,
|
||||
}
|
||||
|
||||
local bitop_set = {
|
||||
["<<"] = true,
|
||||
[">>"] = true,
|
||||
["&"] = true,
|
||||
["^"] = true,
|
||||
["|"] = true,
|
||||
}
|
||||
|
||||
-- Best-effort assessment of the type of a #define
|
||||
local get_type_of_exp
|
||||
get_type_of_exp = typed("Exp, TypeList -> {string}?", function(exp, lst)
|
||||
if type(exp[1]) == "string" and exp[2] == nil then
|
||||
local val = exp[1]
|
||||
if val:sub(1,1) == '"' or val:sub(1,2) == 'L"' then
|
||||
return base_c_types.CONST_CHAR_PTR
|
||||
elseif val:sub(1,1) == "'" or val:sub(1,2) == "L'" then
|
||||
return base_c_types.CONST_CHAR
|
||||
elseif val:match("^[0-9]*LL$") then
|
||||
return base_c_types.LONG_LONG
|
||||
elseif val:match("^[0-9]*L$") then
|
||||
return base_c_types.LONG
|
||||
elseif val:match("%.") then
|
||||
return base_c_types.DOUBLE
|
||||
else
|
||||
return base_c_types.INT
|
||||
end
|
||||
end
|
||||
|
||||
if type(exp[1]) == "string" and exp[2] and exp[2].args then
|
||||
local fn = lst[exp[1]]
|
||||
if not fn or not fn.ret then
|
||||
return nil -- unknown function, or not a function
|
||||
end
|
||||
local r = fn.ret.type
|
||||
return table.move(r, 1, #r, 1, {}) -- shallow_copy(r)
|
||||
end
|
||||
|
||||
if exp.unop == "*" then
|
||||
local etype = get_type_of_exp(exp[1], lst)
|
||||
if not etype then
|
||||
return nil
|
||||
end
|
||||
local rem = table.remove(etype)
|
||||
assert(rem == "*")
|
||||
return etype
|
||||
elseif exp.unop == "-" then
|
||||
return get_type_of_exp(exp[1], lst)
|
||||
elseif exp.op == "?" then
|
||||
return get_type_of_exp(exp[2], lst)
|
||||
elseif exp.op == "," then
|
||||
return get_type_of_exp(exp[#exp], lst)
|
||||
elseif binop_set[exp.op] then
|
||||
local e1 = get_type_of_exp(exp[1], lst)
|
||||
if not e1 then
|
||||
return nil
|
||||
end
|
||||
-- some binops are also unops (e.g. - and *)
|
||||
if exp[2] then
|
||||
local e2 = get_type_of_exp(exp[2], lst)
|
||||
if not e2 then
|
||||
return nil
|
||||
end
|
||||
return get_binop_type(e1, e2)
|
||||
else
|
||||
return e1
|
||||
end
|
||||
elseif relop_set[exp.op] then
|
||||
return base_c_types.INT
|
||||
elseif bitop_set[exp.op] then
|
||||
return get_type_of_exp(exp[1], lst) -- ...or should it be int?
|
||||
elseif exp.op then
|
||||
print("FIXME unsupported op", exp.op)
|
||||
end
|
||||
return nil
|
||||
end)
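
-- Rough mapping this best-effort pass produces for typical defines (comment
-- only; illustrative inputs):
--
--     #define NAME  "prog"      ->  { "const", "char", "*" }
--     #define BUFSZ 4096        ->  { "int" }
--     #define BIG   4096L       ->  { "long" }
--     #define PI    3.1415926   ->  { "double" }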
|
||||
|
||||
function cdefines.register_define(lst, name, text, define_set)
|
||||
local exp, err, line, col = c99.match_language_expression_grammar(text .. " ")
|
||||
if not exp then
|
||||
-- failed parsing expression
|
||||
-- print(("failed parsing: %d:%d: %s\n"):format(line, col, text))
|
||||
return
|
||||
end
|
||||
local typ = get_type_of_exp(exp, lst)
|
||||
if typ then
|
||||
add_type(lst, name, { type = typ })
|
||||
end
|
||||
end
|
||||
|
||||
function cdefines.register_defines(lst, define_set)
|
||||
for name, def in pairs(define_set) do
|
||||
if #def == 0 then
|
||||
goto continue
|
||||
end
|
||||
local text = cpp.expand_macro(name, define_set)
|
||||
cdefines.register_define(lst, name, text, define_set)
|
||||
::continue::
|
||||
end
|
||||
end
|
||||
|
||||
return cdefines
|
||||
@@ -0,0 +1,54 @@
|
||||
local cdriver = {}
|
||||
|
||||
local cpp = require("plugins.ffi.c-parser.cpp")
|
||||
local c99 = require("plugins.ffi.c-parser.c99")
|
||||
local ctypes = require("plugins.ffi.c-parser.ctypes")
|
||||
local cdefines = require("plugins.ffi.c-parser.cdefines")
|
||||
|
||||
function cdriver.process_file(filename)
|
||||
local ctx, err = cpp.parse_file(filename)
|
||||
if not ctx then
|
||||
return nil, "failed preprocessing '"..filename.."': " .. err
|
||||
end
|
||||
|
||||
local srccode = table.concat(ctx.output, "\n").." $EOF$"
|
||||
|
||||
local res, err, line, col, fragment = c99.match_language_grammar(srccode)
|
||||
if not res then
|
||||
return nil, ("failed parsing: %s:%d:%d: %s\n%s"):format(filename, line, col, err, fragment)
|
||||
end
|
||||
|
||||
local ffi_types, err = ctypes.register_types(res)
|
||||
if not ffi_types then
|
||||
return nil, err
|
||||
end
|
||||
|
||||
cdefines.register_defines(ffi_types, ctx.defines)
|
||||
|
||||
return ffi_types
|
||||
end
|
||||
|
||||
function cdriver.process_context(context)
|
||||
local ctx, err = cpp.parse_context(context)
|
||||
if not ctx then
|
||||
return nil, "failed preprocessing '"..context.."': " .. err
|
||||
end
|
||||
|
||||
local srccode = table.concat(ctx.output, "\n").." $EOF$"
|
||||
|
||||
local res, err, line, col, fragment = c99.match_language_grammar(srccode)
|
||||
if not res then
|
||||
return nil, ("failed parsing: %s:%d:%d: %s\n%s"):format(context, line, col, err, fragment)
|
||||
end
|
||||
|
||||
local ffi_types, err = ctypes.register_types(res)
|
||||
if not ffi_types then
|
||||
return nil, err
|
||||
end
|
||||
|
||||
cdefines.register_defines(ffi_types, ctx.defines)
|
||||
|
||||
return ffi_types
|
||||
end
|
||||
|
||||
return cdriver
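
-- Hedged usage sketch (comment only; the header path is just an example):
--
--     local cdriver = require("plugins.ffi.c-parser.cdriver")
--     local ffi_types, err = cdriver.process_file("/usr/include/stdio.h")
--     if not ffi_types then
--         print(err)
--     else
--         for _, item in ipairs(ffi_types) do
--             print(item.name)   -- one entry per registered type or define
--         end
--     end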
@@ -0,0 +1,869 @@
|
||||
local cpp = {}
|
||||
|
||||
local typed = require("plugins.ffi.c-parser.typed")
|
||||
local c99 = require("plugins.ffi.c-parser.c99")
|
||||
|
||||
local SEP = package.config:sub(1,1)
|
||||
|
||||
local function shl(a, b)
|
||||
return a << b
|
||||
end
|
||||
local function shr(a, b)
|
||||
return a >> b
|
||||
end
|
||||
|
||||
local function debug(...) end
|
||||
--[[
|
||||
local inspect = require("inspect")
|
||||
local function debug(...)
|
||||
local args = { ... }
|
||||
for i, arg in ipairs(args) do
|
||||
if type(arg) == "table" then
|
||||
args[i] = inspect(arg)
|
||||
end
|
||||
end
|
||||
print(table.unpack(args))
|
||||
end
|
||||
|
||||
local function is_sequence(xs)
|
||||
if type(xs) ~= "table" then
|
||||
return false
|
||||
end
|
||||
local l = #xs
|
||||
for k, _ in pairs(xs) do
|
||||
if type(k) ~= "number" or k < 1 or k > l or math.floor(k) ~= k then
|
||||
return false
|
||||
end
|
||||
end
|
||||
return true
|
||||
end
|
||||
--]]
|
||||
|
||||
local gcc_default_defines
|
||||
do
|
||||
local default_defines
|
||||
|
||||
local function shallow_copy(t)
|
||||
local u = {}
|
||||
for k,v in pairs(t) do
|
||||
u[k] = v
|
||||
end
|
||||
return u
|
||||
end
|
||||
|
||||
gcc_default_defines = function()
|
||||
if default_defines then
|
||||
return shallow_copy(default_defines)
|
||||
end
|
||||
|
||||
local pd = io.popen("LANG=C gcc -dM -E - < /dev/null")
|
||||
if not pd then
|
||||
return {}
|
||||
end
|
||||
local blank_ctx = {
|
||||
incdirs = {},
|
||||
defines = {},
|
||||
ifmode = { true },
|
||||
output = {},
|
||||
current_dir = {},
|
||||
}
|
||||
typed.set_type(blank_ctx, "Ctx")
|
||||
local ctx = cpp.parse_file("-", pd, blank_ctx)
|
||||
|
||||
ctx.defines["__builtin_va_list"] = { "char", "*" }
|
||||
ctx.defines["__extension__"] = {}
|
||||
ctx.defines["__attribute__"] = { args = { "arg" }, repl = {} }
|
||||
ctx.defines["__restrict__"] = { "restrict" }
|
||||
ctx.defines["__restrict"] = { "restrict" }
|
||||
ctx.defines["__inline__"] = { "inline" }
|
||||
ctx.defines["__inline"] = { "inline" }
|
||||
|
||||
default_defines = ctx.defines
|
||||
return shallow_copy(ctx.defines)
|
||||
end
|
||||
end
|
||||
|
||||
local function cpp_include_paths()
|
||||
local pd = io.popen("LANG=C cpp -v /dev/null -o /dev/null 2>&1")
|
||||
if not pd then
|
||||
return { quote = {}, system = { "/usr/include"} }
|
||||
end
|
||||
local res = {
|
||||
quote = {},
|
||||
system = {},
|
||||
}
|
||||
local mode = nil
|
||||
for line in pd:lines() do
|
||||
if line:find([[#include "..." search starts here]], 1, true) then
|
||||
mode = "quote"
|
||||
elseif line:find([[#include <...> search starts here]], 1, true) then
|
||||
mode = "system"
|
||||
elseif line:find([[End of search list]], 1, true) then
|
||||
mode = nil
|
||||
elseif mode then
|
||||
table.insert(res[mode], line:sub(2))
|
||||
end
|
||||
end
|
||||
pd:close()
|
||||
return res
|
||||
end
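
-- The table returned above has this shape (comment only; the actual paths
-- depend on the local toolchain, these are made up):
--
--     {
--         quote  = {},
--         system = { "/usr/lib/gcc/x86_64-pc-linux-gnu/13/include", "/usr/include" },
--     }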
|
||||
|
||||
-- TODO default defines: `gcc -dM -E - < /dev/null`
|
||||
|
||||
-- Not supported:
|
||||
-- * character set conversion
|
||||
-- * trigraphs
|
||||
|
||||
local states = {
|
||||
any = {
|
||||
['"'] = { next = "dquote" },
|
||||
["'"] = { next = "squote" },
|
||||
["/"] = { silent = true, next = "slash" },
|
||||
},
|
||||
dquote = {
|
||||
['"'] = { next = "any" },
|
||||
["\\"] = { next = "dquote_backslash" },
|
||||
},
|
||||
dquote_backslash = {
|
||||
single_char = true,
|
||||
default = { next = "dquote" },
|
||||
},
|
||||
squote = {
|
||||
["'"] = { next = "any" },
|
||||
["\\"] = { next = "squote_backslash" },
|
||||
},
|
||||
squote_backslash = {
|
||||
single_char = true,
|
||||
default = { next = "squote" },
|
||||
},
|
||||
slash = {
|
||||
single_char = true,
|
||||
["/"] = { add = " ", silent = true, next = "line_comment" },
|
||||
["*"] = { add = " ", silent = true, next = "block_comment" },
|
||||
default = { add = "/", next = "any" },
|
||||
},
|
||||
line_comment = {
|
||||
silent = true,
|
||||
},
|
||||
block_comment = {
|
||||
silent = true,
|
||||
["*"] = { silent = true, next = "try_end_block_comment" },
|
||||
continue_line = "block_comment",
|
||||
},
|
||||
try_end_block_comment = {
|
||||
single_char = true,
|
||||
silent = true,
|
||||
["/"] = { silent = true, next = "any" },
|
||||
["*"] = { silent = true, next = "try_end_block_comment" },
|
||||
default = { silent = true, next = "block_comment" },
|
||||
continue_line = "block_comment",
|
||||
},
|
||||
}
|
||||
|
||||
for _, rules in pairs(states) do
|
||||
local out = "["
|
||||
for k, _ in pairs(rules) do
|
||||
if #k == 1 then
|
||||
out = out .. k
|
||||
end
|
||||
end
|
||||
out = out .. "]"
|
||||
rules.pattern = out ~= "[]" and out
|
||||
end
|
||||
|
||||
local function add(buf, txt)
|
||||
if not buf then
|
||||
buf = {}
|
||||
end
|
||||
table.insert(buf, txt)
|
||||
return buf
|
||||
end
|
||||
|
||||
cpp.initial_processing = typed("FILE* -> LineList", function(fd)
|
||||
local backslash_buf
|
||||
local buf
|
||||
local state = "any"
|
||||
local output = {}
|
||||
local linenr = 0
|
||||
for line in fd:lines() do
|
||||
linenr = linenr + 1
|
||||
local len = #line
|
||||
if line:find("\\", len, true) then
|
||||
-- If backslash-terminated, buffer it
|
||||
backslash_buf = add(backslash_buf, line:sub(1, len - 1))
|
||||
else
|
||||
-- Merge backslash-terminated line
|
||||
if backslash_buf then
|
||||
table.insert(backslash_buf, line)
|
||||
line = table.concat(backslash_buf)
|
||||
end
|
||||
backslash_buf = nil
|
||||
|
||||
len = #line
|
||||
local i = 1
|
||||
local out = ""
|
||||
-- Go through the line
|
||||
while i <= len do
|
||||
-- Current state in the state machine
|
||||
local st = states[state]
|
||||
|
||||
-- Look for next character matching a state transition
|
||||
local n = nil
|
||||
if st.pattern then
|
||||
if st.single_char then
|
||||
if line:sub(i,i):find(st.pattern) then
|
||||
n = i
|
||||
end
|
||||
else
|
||||
n = line:find(st.pattern, i)
|
||||
end
|
||||
end
|
||||
|
||||
local transition, ch
|
||||
if n then
|
||||
ch = line:sub(n, n)
|
||||
transition = st[ch]
|
||||
else
|
||||
n = i
|
||||
ch = line:sub(n, n)
|
||||
transition = st.default
|
||||
end
|
||||
|
||||
if not transition then
|
||||
-- output the rest of the string if we should
|
||||
if not st.silent then
|
||||
out = i == 1 and line or line:sub(i)
|
||||
end
|
||||
break
|
||||
end
|
||||
|
||||
-- output everything up to the transition if we should
|
||||
if n > i and not st.silent then
|
||||
buf = add(buf, line:sub(i, n - 1))
|
||||
end
|
||||
|
||||
-- Some transitions output an explicit character
|
||||
if transition.add then
|
||||
buf = add(buf, transition.add)
|
||||
end
|
||||
|
||||
if not transition.silent then
|
||||
buf = add(buf, ch)
|
||||
end
|
||||
|
||||
-- and move to the next state
|
||||
state = transition.next
|
||||
i = n + 1
|
||||
end
|
||||
|
||||
-- If we ended in a non-line-terminating state
|
||||
if states[state].continue_line then
|
||||
-- buffer the output and keep going
|
||||
buf = add(buf, out)
|
||||
state = states[state].continue_line
|
||||
else
|
||||
-- otherwise, flush the buffer
|
||||
if buf then
|
||||
table.insert(buf, out)
|
||||
out = table.concat(buf)
|
||||
buf = nil
|
||||
end
|
||||
-- output the string and reset the state.
|
||||
table.insert(output, { nr = linenr, line = out})
|
||||
state = "any"
|
||||
end
|
||||
end
|
||||
end
|
||||
fd:close()
|
||||
typed.set_type(output, "LineList")
|
||||
return output
|
||||
end)
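
-- Hedged illustration of the state machine above (comment only): feed a
-- single line through cpp.initial_processing via a minimal fake FILE* handle,
-- the same trick cpp.parse_context uses further down. Comments are blanked
-- out while the contents of string literals are left untouched.
--
--     local function one_line_fd(s)
--         local done = false
--         return {
--             lines = function()
--                 return function()
--                     if done then return nil end
--                     done = true
--                     return s
--                 end
--             end,
--             close = function() end,
--         }
--     end
--
--     local list = cpp.initial_processing(one_line_fd("int x; /* note */ // tail"))
--     -- list[1].nr == 1; list[1].line is expected to be "int x;" followed by
--     -- blanks where the two comments used to be.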
|
||||
|
||||
cpp.tokenize = typed("string -> table", function(line)
|
||||
return c99.match_preprocessing_grammar(line)
|
||||
end)
|
||||
|
||||
local function find_file(ctx, filename, mode, is_next)
|
||||
local paths = {}
|
||||
local current_dir = ctx.current_dir[#ctx.current_dir]
|
||||
if mode == "quote" or is_next then
|
||||
if not is_next then
|
||||
table.insert(paths, current_dir)
|
||||
end
|
||||
for _, incdir in ipairs(ctx.incdirs.quote or {}) do
|
||||
table.insert(paths, incdir)
|
||||
end
|
||||
end
|
||||
if mode == "system" or is_next then
|
||||
for _, incdir in ipairs(ctx.incdirs.system or {}) do
|
||||
table.insert(paths, incdir)
|
||||
end
|
||||
end
|
||||
if is_next then
|
||||
while paths[1] and paths[1] ~= current_dir do
|
||||
table.remove(paths, 1)
|
||||
end
|
||||
table.remove(paths, 1)
|
||||
end
|
||||
for _, path in ipairs(paths) do
|
||||
local pathname = path..SEP..filename
|
||||
local fd, err = io.open(pathname, "r")
|
||||
if fd then
|
||||
return pathname, fd
|
||||
end
|
||||
end
|
||||
return nil, nil, "file not found"
|
||||
end
|
||||
|
||||
local parse_expression = typed("{string} -> Exp?", function(tokens)
|
||||
local text = table.concat(tokens, " ")
|
||||
local exp, err, _, _, fragment = c99.match_preprocessing_expression_grammar(text)
|
||||
if not exp then
|
||||
print("Error parsing expression: " .. tostring(err) .. ": " .. text .. " AT " .. fragment)
|
||||
end
|
||||
return exp
|
||||
end)
|
||||
|
||||
local eval_exp
|
||||
eval_exp = typed("Ctx, Exp -> number", function(ctx, exp)
|
||||
debug(exp)
|
||||
|
||||
if not exp.op then
|
||||
local val = exp[1]
|
||||
typed.check(val, "string")
|
||||
local defined = ctx.defines[val]
|
||||
if defined then
|
||||
assert(type(defined) == "table")
|
||||
local subexp = parse_expression(defined)
|
||||
if not subexp then
|
||||
return 0 -- FIXME
|
||||
end
|
||||
return eval_exp(ctx, subexp)
|
||||
end
|
||||
val = val:gsub("U*L*$", "")
|
||||
if val:match("^0[xX]") then
|
||||
return tonumber(val) or 0
|
||||
elseif val:sub(1,1) == "0" then
|
||||
return tonumber(val, 8) or 0
|
||||
else
|
||||
return tonumber(val) or 0
|
||||
end
|
||||
elseif exp.op == "+" then
|
||||
if exp[2] then
|
||||
return eval_exp(ctx, exp[1]) + eval_exp(ctx, exp[2])
|
||||
else
|
||||
return eval_exp(ctx, exp[1])
|
||||
end
|
||||
elseif exp.op == "-" then
|
||||
if exp[2] then
|
||||
return eval_exp(ctx, exp[1]) - eval_exp(ctx, exp[2])
|
||||
else
|
||||
return -(eval_exp(ctx, exp[1]))
|
||||
end
|
||||
elseif exp.op == "*" then return eval_exp(ctx, exp[1]) * eval_exp(ctx, exp[2])
|
||||
elseif exp.op == "/" then return eval_exp(ctx, exp[1]) / eval_exp(ctx, exp[2])
|
||||
elseif exp.op == ">>" then return shr(eval_exp(ctx, exp[1]), eval_exp(ctx, exp[2])) -- FIXME C semantics
|
||||
elseif exp.op == "<<" then return shl(eval_exp(ctx, exp[1]), eval_exp(ctx, exp[2])) -- FIXME C semantics
|
||||
elseif exp.op == "==" then return (eval_exp(ctx, exp[1]) == eval_exp(ctx, exp[2])) and 1 or 0
|
||||
elseif exp.op == "!=" then return (eval_exp(ctx, exp[1]) ~= eval_exp(ctx, exp[2])) and 1 or 0
|
||||
elseif exp.op == ">=" then return (eval_exp(ctx, exp[1]) >= eval_exp(ctx, exp[2])) and 1 or 0
|
||||
elseif exp.op == "<=" then return (eval_exp(ctx, exp[1]) <= eval_exp(ctx, exp[2])) and 1 or 0
|
||||
elseif exp.op == ">" then return (eval_exp(ctx, exp[1]) > eval_exp(ctx, exp[2])) and 1 or 0
|
||||
elseif exp.op == "<" then return (eval_exp(ctx, exp[1]) < eval_exp(ctx, exp[2])) and 1 or 0
|
||||
elseif exp.op == "!" then return (eval_exp(ctx, exp[1]) == 1) and 0 or 1
|
||||
elseif exp.op == "&&" then
|
||||
for _, e in ipairs(exp) do
|
||||
if eval_exp(ctx, e) == 0 then
|
||||
return 0
|
||||
end
|
||||
end
|
||||
return 1
|
||||
elseif exp.op == "||" then
|
||||
for _, e in ipairs(exp) do
|
||||
if eval_exp(ctx, e) ~= 0 then
|
||||
return 1
|
||||
end
|
||||
end
|
||||
return 0
|
||||
elseif exp.op == "?" then
|
||||
if eval_exp(ctx, exp[1]) ~= 0 then
|
||||
return eval_exp(ctx, exp[2])
|
||||
else
|
||||
return eval_exp(ctx, exp[3])
|
||||
end
|
||||
elseif exp.op == "defined" then
|
||||
return (ctx.defines[exp[1][1]] ~= nil) and 1 or 0
|
||||
else
|
||||
error("unimplemented operator " .. tostring(exp.op))
|
||||
end
|
||||
end)
|
||||
|
||||
local consume_parentheses = typed("{string}, number, LineList, number -> {{string}}, number", function(tokens, start, linelist, cur)
|
||||
local args = {}
|
||||
local i = start + 1
|
||||
local arg = {}
|
||||
local stack = 0
|
||||
while true do
|
||||
local token = tokens[i]
|
||||
if token == nil then
|
||||
repeat
|
||||
cur = cur + 1
|
||||
if not linelist[cur] then
|
||||
error("unterminated function-like macro")
|
||||
end
|
||||
local nextline = linelist[cur].tk
|
||||
linelist[cur].tk = {}
|
||||
table.move(nextline, 1, #nextline, i, tokens)
|
||||
token = tokens[i]
|
||||
until token
|
||||
end
|
||||
if token == "(" then
|
||||
stack = stack + 1
|
||||
table.insert(arg, token)
|
||||
elseif token == ")" then
|
||||
if stack == 0 then
|
||||
if #arg > 0 then
|
||||
table.insert(args, arg)
|
||||
end
|
||||
break
|
||||
end
|
||||
stack = stack - 1
|
||||
table.insert(arg, token)
|
||||
elseif token == "," then
|
||||
if stack == 0 then
|
||||
table.insert(args, arg)
|
||||
arg = {}
|
||||
else
|
||||
table.insert(arg, token)
|
||||
end
|
||||
else
|
||||
table.insert(arg, token)
|
||||
end
|
||||
i = i + 1
|
||||
end
|
||||
return args, i
|
||||
end)
|
||||
|
||||
local function array_copy(t)
|
||||
local t2 = {}
|
||||
for i,v in ipairs(t) do
|
||||
t2[i] = v
|
||||
end
|
||||
return t2
|
||||
end
|
||||
|
||||
local function table_remove(list, pos, n)
|
||||
table.move(list, pos + n, #list + n, pos)
|
||||
end
|
||||
|
||||
local function table_replace_n_with(list, at, n, values)
|
||||
local old = #list
|
||||
debug("TRNW?", list, "AT", at, "N", n, "VALUES", values)
|
||||
--assert(is_sequence(list))
|
||||
local nvalues = #values
|
||||
local nils = n >= nvalues and (n - nvalues + 1) or 0
|
||||
if n ~= nvalues then
|
||||
table.move(list, at + n, #list + nils, at + nvalues)
|
||||
end
|
||||
debug("....", list)
|
||||
table.move(values, 1, nvalues, at, list)
|
||||
--assert(is_sequence(list))
|
||||
debug("TRNW!", list)
|
||||
assert(#list == old - n + #values)
|
||||
end
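
-- table_remove and table_replace_n_with splice arrays in place with
-- table.move. A worked example of the removal case (comment only):
--
--     local t = { "a", "b", "c", "d", "e" }
--     table_remove(t, 2, 2)   -- i.e. table.move(t, 4, #t + 2, 2)
--     -- reads now give { "a", "d", "e" }: the tail slid left over "b" and "c"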
|
||||
|
||||
local stringify = typed("{string} -> string", function(tokens)
|
||||
return '"'..table.concat(tokens, " "):gsub("\"", "\\")..'"'
|
||||
end)
|
||||
|
||||
local macro_expand
|
||||
|
||||
local mark_noloop = typed("table, string, number -> ()", function(noloop, token, n)
|
||||
noloop[token] = math.max(noloop[token] or 0, n)
|
||||
end)
|
||||
|
||||
local shift_noloop = typed("table, number -> ()", function(noloop, n)
|
||||
for token, v in pairs(noloop) do
|
||||
noloop[token] = v + n
|
||||
end
|
||||
end)
|
||||
|
||||
local valid_noloop = typed("table, string, number -> boolean", function(noloop, token, n)
|
||||
return noloop[token] == nil or noloop[token] < n
|
||||
end)
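
-- mark_noloop/shift_noloop/valid_noloop implement the usual "do not re-expand
-- a macro inside its own expansion" rule: once a macro is replaced, its name
-- is blocked up to the end of the tokens it produced. Hedged check (comment
-- only):
--
--     local cpp = require("plugins.ffi.c-parser.cpp")
--     print(cpp.expand_macro("FOO", { FOO = { "FOO", "+", "1" } }))
--     -- expected output: "FOO + 1" (the inner FOO is left alone, no loop)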
|
||||
|
||||
local replace_args = typed("Ctx, {string}, table, LineList, number -> ()", function(ctx, tokens, args, linelist, cur)
|
||||
local i = 1
|
||||
local hash_next = false
|
||||
local join_next = false
|
||||
while true do
|
||||
local token = tokens[i]
|
||||
if not token then
|
||||
break
|
||||
end
|
||||
if token == "#" then
|
||||
hash_next = true
|
||||
table.remove(tokens, i)
|
||||
elseif token == "##" then
|
||||
join_next = true
|
||||
table.remove(tokens, i)
|
||||
elseif args[token] then
|
||||
macro_expand(ctx, args[token], linelist, cur, false)
|
||||
if hash_next then
|
||||
tokens[i] = stringify(args[token])
|
||||
hash_next = false
|
||||
elseif join_next then
|
||||
tokens[i - 1] = tokens[i - 1] .. table.concat(args[token], " ")
|
||||
table.remove(tokens, i)
|
||||
join_next = false
|
||||
else
|
||||
table_replace_n_with(tokens, i, 1, args[token])
|
||||
debug(token, args[token], tokens)
|
||||
i = i + #args[token]
|
||||
end
|
||||
elseif join_next then
|
||||
tokens[i - 1] = tokens[i - 1] .. tokens[i]
|
||||
table.remove(tokens, i)
|
||||
join_next = false
|
||||
else
|
||||
hash_next = false
|
||||
join_next = false
|
||||
i = i + 1
|
||||
end
|
||||
end
|
||||
end)
|
||||
|
||||
macro_expand = typed("Ctx, {string}, LineList, number, boolean -> ()", function(ctx, tokens, linelist, cur, expr_mode)
|
||||
local i = 1
|
||||
-- TODO propagate noloop into replace_args. recurse into macro_expand storing a proper offset internally.
|
||||
local noloop = {}
|
||||
while true do
|
||||
::continue::
|
||||
debug(i, tokens)
|
||||
local token = tokens[i]
|
||||
if not token then
|
||||
break
|
||||
end
|
||||
if expr_mode then
|
||||
if token == "defined" then
|
||||
if tokens[i + 1] == "(" then
|
||||
i = i + 2
|
||||
end
|
||||
i = i + 2
|
||||
goto continue
|
||||
end
|
||||
end
|
||||
local define = ctx.defines[token]
|
||||
if define and valid_noloop(noloop, token, i) then
|
||||
debug(token, define)
|
||||
local repl = define.repl
|
||||
if define.args then
|
||||
if tokens[i + 1] == "(" then
|
||||
local args, j = consume_parentheses(tokens, i + 1, linelist, cur)
|
||||
debug("args:", #args, args)
|
||||
local named_args = {}
|
||||
for i = 1, #define.args do
|
||||
named_args[define.args[i]] = args[i] or {}
|
||||
end
|
||||
local expansion = array_copy(repl)
|
||||
replace_args(ctx, expansion, named_args, linelist, cur)
|
||||
local nexpansion = #expansion
|
||||
local n = j - i + 1
|
||||
if nexpansion == 0 then
|
||||
table_remove(tokens, i, n)
|
||||
else
|
||||
table_replace_n_with(tokens, i, n, expansion)
|
||||
end
|
||||
shift_noloop(noloop, nexpansion - n)
|
||||
mark_noloop(noloop, token, i + nexpansion - 1)
|
||||
else
|
||||
i = i + 1
|
||||
end
|
||||
else
|
||||
local ndefine = #define
|
||||
if ndefine == 0 then
|
||||
table.remove(tokens, i)
|
||||
shift_noloop(noloop, -1)
|
||||
elseif ndefine == 1 then
|
||||
tokens[i] = define[1]
|
||||
mark_noloop(noloop, token, i)
|
||||
noloop[token] = math.max(noloop[token] or 0, i)
|
||||
else
|
||||
table_replace_n_with(tokens, i, 1, define)
|
||||
mark_noloop(noloop, token, i + ndefine - 1)
|
||||
end
|
||||
end
|
||||
else
|
||||
i = i + 1
|
||||
end
|
||||
end
|
||||
end)
|
||||
|
||||
local run_expression = typed("Ctx, {string} -> boolean", function(ctx, tks)
|
||||
local exp = parse_expression(tks)
|
||||
return eval_exp(ctx, exp) ~= 0
|
||||
end)
|
||||
|
||||
cpp.parse_file = typed("string, FILE*?, Ctx? -> Ctx?, string?", function(filename, fd, ctx)
|
||||
if not ctx then
|
||||
ctx = {
|
||||
incdirs = cpp_include_paths(),
|
||||
defines = gcc_default_defines(),
|
||||
---@type any[]
|
||||
ifmode = { true },
|
||||
output = {},
|
||||
current_dir = {}
|
||||
}
|
||||
typed.set_type(ctx, "Ctx")
|
||||
-- if not absolute path
|
||||
if not filename:match("^/") then
|
||||
local found_name, found_fd = find_file(ctx, filename, "system")
|
||||
if found_fd then
|
||||
filename, fd = found_name, found_fd
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local current_dir = filename:gsub("/[^/]*$", "")
|
||||
if current_dir == filename then
|
||||
current_dir = "."
|
||||
local found_name, found_fd = find_file(ctx, filename, "system")
|
||||
if found_fd then
|
||||
filename, fd = found_name, found_fd
|
||||
end
|
||||
end
|
||||
table.insert(ctx.current_dir, current_dir)
|
||||
|
||||
local err
|
||||
if not fd then
|
||||
fd, err = io.open(filename, "rb")
|
||||
if not fd then
|
||||
return nil, err
|
||||
end
|
||||
end
|
||||
local linelist = cpp.initial_processing(fd)
|
||||
|
||||
for _, lineitem in ipairs(linelist) do
|
||||
lineitem.tk = cpp.tokenize(lineitem.line)
|
||||
end
|
||||
|
||||
local ifmode = ctx.ifmode
|
||||
for cur, lineitem in ipairs(linelist) do
|
||||
local line = lineitem.line
|
||||
local tk = lineitem.tk
|
||||
debug(filename, cur, ifmode[#ifmode], #ifmode, line)
|
||||
|
||||
if #ifmode == 1 and (tk.directive == "elif" or tk.directive == "else" or tk.directive == "endif") then
|
||||
return nil, "unexpected directive " .. tk.directive
|
||||
end
|
||||
|
||||
if tk.exp then
|
||||
macro_expand(ctx, tk.exp, linelist, cur, true)
|
||||
end
|
||||
|
||||
if ifmode[#ifmode] == true then
|
||||
if tk.directive then
|
||||
debug(tk)
|
||||
end
|
||||
if tk.directive == "define" then
|
||||
local k = tk.id
|
||||
local v = tk.args and tk or tk.repl
|
||||
ctx.defines[k] = v
|
||||
elseif tk.directive == "undef" then
|
||||
ctx.defines[tk.id] = nil
|
||||
elseif tk.directive == "ifdef" then
|
||||
table.insert(ifmode, (ctx.defines[tk.id] ~= nil))
|
||||
elseif tk.directive == "ifndef" then
|
||||
table.insert(ifmode, (ctx.defines[tk.id] == nil))
|
||||
elseif tk.directive == "if" then
|
||||
table.insert(ifmode, run_expression(ctx, tk.exp))
|
||||
elseif tk.directive == "elif" then
|
||||
ifmode[#ifmode] = "skip"
|
||||
elseif tk.directive == "else" then
|
||||
ifmode[#ifmode] = not ifmode[#ifmode]
|
||||
elseif tk.directive == "endif" then
|
||||
table.remove(ifmode, #ifmode)
|
||||
elseif tk.directive == "error" or tk.directive == "pragma" then
|
||||
-- ignore
|
||||
elseif tk.directive == "include" or tk.directive == "include_next" then
|
||||
local name = tk.exp[1]
|
||||
local mode = tk.exp.mode
|
||||
local is_next = (tk.directive == "include_next")
|
||||
local inc_filename, inc_fd, err = find_file(ctx, name, mode, is_next)
|
||||
if not inc_filename then
|
||||
-- fall back to trying to load an #include "..." as #include <...>;
|
||||
-- this is necessary for Mac system headers
|
||||
inc_filename, inc_fd, err = find_file(ctx, name, "system", is_next)
|
||||
end
|
||||
if not inc_filename then
|
||||
return nil, name..":"..err
|
||||
end
|
||||
cpp.parse_file(inc_filename, inc_fd, ctx)
|
||||
else
|
||||
macro_expand(ctx, tk, linelist, cur, false)
|
||||
table.insert(ctx.output, table.concat(tk, " "))
|
||||
end
|
||||
elseif ifmode[#ifmode] == false then
|
||||
if tk.directive == "ifdef"
|
||||
or tk.directive == "ifndef"
|
||||
or tk.directive == "if" then
|
||||
table.insert(ifmode, "skip")
|
||||
elseif tk.directive == "else" then
|
||||
ifmode[#ifmode] = not ifmode[#ifmode]
|
||||
elseif tk.directive == "elif" then
|
||||
ifmode[#ifmode] = run_expression(ctx, tk.exp)
|
||||
elseif tk.directive == "endif" then
|
||||
table.remove(ifmode, #ifmode)
|
||||
end
|
||||
elseif ifmode[#ifmode] == "skip" then
|
||||
if tk.directive == "ifdef"
|
||||
or tk.directive == "ifndef"
|
||||
or tk.directive == "if" then
|
||||
table.insert(ifmode, "skip")
|
||||
elseif tk.directive == "else"
|
||||
or tk.directive == "elif" then
|
||||
-- do nothing
|
||||
elseif tk.directive == "endif" then
|
||||
table.remove(ifmode, #ifmode)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
table.remove(ctx.current_dir)
|
||||
|
||||
return ctx, nil
|
||||
end)

cpp.parse_context = typed("string, FILE*?, Ctx? -> Ctx?, string?", function(context, _, ctx)
    if not ctx then
        ctx = {
            incdirs = {},--,cpp_include_paths(),
            defines = {},--gcc_default_defines(),
            ifmode = { true },
            output = {},
            current_dir = {}
        }
        typed.set_type(ctx, "Ctx")
    end

    local fd = {
        lines = function ()
            local n = 0
            return function ()
                if n == 0 then
                    n = 1
                    return context
                end
                return nil
            end
        end,
        close = function ()

        end
    }

    local linelist = cpp.initial_processing(fd)

    for _, lineitem in ipairs(linelist) do
        lineitem.tk = cpp.tokenize(lineitem.line)
    end

    local ifmode = ctx.ifmode
    for cur, lineitem in ipairs(linelist) do
        local line = lineitem.line
        local tk = lineitem.tk
        debug(cur, ifmode[#ifmode], #ifmode, line)

        if #ifmode == 1 and (tk.directive == "elif" or tk.directive == "else" or tk.directive == "endif") then
            return nil, "unexpected directive " .. tk.directive
        end

        if tk.exp then
            macro_expand(ctx, tk.exp, linelist, cur, true)
        end

        if ifmode[#ifmode] == true then
            if tk.directive then
                debug(tk)
            end
            if tk.directive == "define" then
                local k = tk.id
                local v = tk.args and tk or tk.repl
                ctx.defines[k] = v
            elseif tk.directive == "undef" then
                ctx.defines[tk.id] = nil
            elseif tk.directive == "ifdef" then
                table.insert(ifmode, (ctx.defines[tk.id] ~= nil))
            elseif tk.directive == "ifndef" then
                table.insert(ifmode, (ctx.defines[tk.id] == nil))
            elseif tk.directive == "if" then
                table.insert(ifmode, run_expression(ctx, tk.exp))
            elseif tk.directive == "elif" then
                ---@diagnostic disable-next-line: assign-type-mismatch
                ifmode[#ifmode] = "skip"
            elseif tk.directive == "else" then
                ifmode[#ifmode] = not ifmode[#ifmode]
            elseif tk.directive == "endif" then
                table.remove(ifmode, #ifmode)
            elseif tk.directive == "error" or tk.directive == "pragma" then
                -- ignore
            elseif tk.directive == "include" or tk.directive == "include_next" then
                local name = tk.exp[1]
                local mode = tk.exp.mode
                local is_next = (tk.directive == "include_next")
                local inc_filename, inc_fd, err = find_file(ctx, name, mode, is_next)
                if not inc_filename then
                    -- fall back to trying to load an #include "..." as #include <...>;
                    -- this is necessary for Mac system headers
                    inc_filename, inc_fd, err = find_file(ctx, name, "system", is_next)
                end
                if not inc_filename then
                    return nil, name..":"..err
                end
                cpp.parse_file(inc_filename, inc_fd, ctx)
            else
                macro_expand(ctx, tk, linelist, cur, false)
                table.insert(ctx.output, table.concat(tk, " "))
            end
        elseif ifmode[#ifmode] == false then
            if tk.directive == "ifdef"
                or tk.directive == "ifndef"
                or tk.directive == "if" then
                table.insert(ifmode, "skip")
            elseif tk.directive == "else" then
                ifmode[#ifmode] = not ifmode[#ifmode]
            elseif tk.directive == "elif" then
                ifmode[#ifmode] = run_expression(ctx, tk.exp)
            elseif tk.directive == "endif" then
                table.remove(ifmode, #ifmode)
            end
        elseif ifmode[#ifmode] == "skip" then
            if tk.directive == "ifdef"
                or tk.directive == "ifndef"
                or tk.directive == "if" then
                table.insert(ifmode, "skip")
            elseif tk.directive == "else"
                or tk.directive == "elif" then
                -- do nothing
            elseif tk.directive == "endif" then
                table.remove(ifmode, #ifmode)
            end
        end
    end

    table.remove(ctx.current_dir)

    return ctx, nil
end)
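
-- Illustrative usage (a sketch, not from the original sources): parse_context
-- runs the same directive loop as parse_file, but reads from a fake one-shot
-- file handle wrapping the given string instead of a real file:
--
--   local ctx, err = cpp.parse_context("#define FOO 1\nint x = FOO;", nil, nil)
--   -- on success, ctx.defines.FOO is set and ctx.output holds the
--   -- preprocessed output lines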

cpp.expand_macro = typed("string, table -> string", function(macro, define_set)
    local ctx = typed.table("Ctx", setmetatable({
        defines = define_set,
    }, { __index = error, __newindex = error }))
    local tokens = { macro }
    local linelist = typed.table("LineList", { { nr = 1, line = macro } })
    macro_expand(ctx, tokens, linelist, 1, false)
    return table.concat(tokens, " ")
end)
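
-- Illustrative usage (a sketch, not from the original sources): given a
-- define set such as the one built by parse_context above,
--
--   local ctx = cpp.parse_context("#define FOO 42", nil, nil)
--   local expanded = cpp.expand_macro("FOO", ctx.defines)
--
-- expand_macro runs macro_expand over a single pseudo-line and returns the
-- expanded tokens joined with spaces.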

return cpp
@@ -0,0 +1,604 @@
local ctypes = { TESTMODE = false }

local inspect = require("inspect")
local utility = require 'utility'
local util = require 'plugins.ffi.c-parser.util'
local typed = require("plugins.ffi.c-parser.typed")

local equal_declarations

local add_type = typed("TypeList, string, CType -> ()", function (lst, name, typ)
    lst[name] = typ
    table.insert(lst, { name = name, type = typ })
end)

-- Compare two lists of declarations
local equal_lists = typed("array, array -> boolean", function (l1, l2)
    if #l1 ~= #l2 then
        return false
    end
    for i, p1 in ipairs(l1) do
        local p2 = l2[i]
        if not equal_declarations(p1, p2) then
            return false
        end
    end
    return true
end)

equal_declarations = function (t1, t2)
    if type(t1) == "string" or type(t2) == "nil" then
        return t1 == t2
    end
    if not equal_declarations(t1.type, t2.type) then
        return false
    end
    -- if not equal_lists(t1.name, t2.name) then
    --     return false
    -- end
    if t1.type == "struct" then
        if t1.name ~= t2.name then
            return false
        end
    elseif t1.type == "function" then
        if not equal_declarations(t1.ret.type, t2.ret.type) then
            return false
        end
        if not equal_lists(t1.params, t2.params) then
            return false
        end
        if t1.vararg ~= t2.vararg then
            return false
        end
    end
    return true
end

local function is_modifier(str)
    return str == "*" or str == "restrict" or str == "const"
end

local function extract_modifiers(ret_pointer, items)
    while is_modifier(items[1]) do
        table.insert(ret_pointer, table.remove(items, 1))
    end
end

local function get_name(name_src)
    local ret_pointer = {}
    if name_src == nil then
        return false, "could not find a name: " .. inspect(name_src), nil
    end
    local name
    local indices = {}
    if type(name_src) == "string" then
        if is_modifier(name_src) then
            table.insert(ret_pointer, name_src)
        else
            name = name_src
        end
    else
        name_src = name_src.declarator or name_src
        if type(name_src[1]) == "table" then
            extract_modifiers(ret_pointer, name_src[1])
        else
            extract_modifiers(ret_pointer, name_src)
        end
        for _, part in ipairs(name_src) do
            if part.idx then
                table.insert(indices, part.idx)
            end
        end
        name = name_src.name
    end
    return true, name, ret_pointer, next(indices) and indices
end

local get_type
local get_fields

local convert_value = typed("TypeList, table -> CType?, string?", function (lst, src)
    local name = nil
    local ret_pointer = {}
    local idxs = nil

    if type(src.id) == "table" or type(src.ids) == "table" then
        src.id = util.expandSingle(src.id)
        src.ids = util.expandSingle(src.ids)
        -- FIXME multiple ids, e.g.: int *x, y, *z;
        local ok
        ---@diagnostic disable-next-line: cast-local-type
        ok, name, ret_pointer, idxs = get_name(src.id or src.ids)
        if not ok then
            return nil, name
        end
    end

    local typ, err = get_type(lst, src, ret_pointer)
    if not typ then
        return nil, err
    end

    return typed.table("CType", {
        name = name,
        type = typ,
        idxs = idxs,
    }), nil
end)

local function convert_fields(field_src, fields)
    if field_src.ids then
        for _, id in ipairs(field_src.ids) do
            id.type = utility.deepCopy(field_src.type)
            if id.type and id[1] then
                for _, v in ipairs(id[1]) do
                    table.insert(id.type, v)
                end
                if id[1].idx then
                    id.isarray = true
                end
                id[1] = nil
            end
            table.insert(fields, id)
        end
        return true
    end
end

-- Interpret field data from `field_src` and add it to `fields`.
local function add_to_fields(lst, field_src, fields)
    if type(field_src) == "table" and not field_src.ids then
        assert(field_src.type.type == "union")
        local subfields, err = get_fields(lst, field_src.type.fields)
        if not subfields then
            return nil, err
        end
        for _, subfield in ipairs(subfields) do
            table.insert(fields, subfield)
        end
        return true
    end

    if convert_fields(field_src, fields) then
        return true
    end
    local field, err = convert_value(lst, field_src)
    if not field then
        return nil, err
    end
    table.insert(fields, field)
    return true
end

get_fields = function (lst, fields_src)
    local fields = {}
    for _, field_src in ipairs(fields_src) do
        local ok, err = add_to_fields(lst, field_src, fields)
        if not ok then
            return false, err
        end
    end
    return fields
end

local function get_enum_items(_, values)
    local items = {}
    for _, v in ipairs(values) do
        -- TODO store enum actual values
        table.insert(items, { name = v.id, value = v.value })
    end
    return items
end

local function getAnonymousID(t)
    local v = tostring(t)
    local _, e = v:find("table: 0x", 0, true)
    return v:sub(e + 1)
end

local get_composite_type = typed("TypeList, string?, string, array, string, function -> CType, string",
    function (lst, specid, spectype, parts, partsfield, get_parts)
        local name = specid
        local key = spectype .. "@" .. (name or ctypes.TESTMODE and 'anonymous' or getAnonymousID(parts))

        if not lst[key] then
            -- Forward declaration
            lst[key] = typed.table("CType", {
                type = spectype,
                name = name,
            })
        end

        if parts then
            local err
            parts, err = get_parts(lst, parts)
            if not parts then
                return nil, err
            end
        end

        local typ = typed.table("CType", {
            type = spectype,
            name = name,
            [partsfield] = parts,
        })

        if lst[key] then
            if typ[partsfield] and lst[key][partsfield] and not equal_declarations(typ, lst[key]) then
                return nil, "redeclaration for " .. key
            end
        end
        add_type(lst, key, typ)

        return typ, key
    end)
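
-- Composite types are indexed in the TypeList under a "<kind>@<name>" key,
-- e.g. "struct@timeval" or "enum@E"; anonymous structs/unions/enums fall back
-- to the table-address suffix from getAnonymousID (or the fixed string
-- "anonymous" when ctypes.TESTMODE is set, presumably to keep test output
-- stable).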

local function get_structunion(lst, spec)
    if spec.fields and not spec.fields[1] then
        spec.fields = { spec.fields }
    end
    return get_composite_type(lst, spec.id, spec.type, spec.fields, "fields", get_fields)
end

local function get_enum(lst, spec)
    if spec.values and not spec.values[1] then
        spec.values = { spec.values }
    end
    local typ, key = get_composite_type(lst, spec.id, spec.type, spec.values, "values", get_enum_items)
    if typ.values then
        for _, value in ipairs(typ.values) do
            add_type(lst, value.name, typ)
        end
    end
    return typ, key
end

local function refer(lst, item, get_fn)
    if item.id and not item.fields then
        local key = item.type .. "@" .. item.id
        local su_typ = lst[key]
        if not su_typ then
            return {
                type = item.type,
                name = { item.id },
            }
        end
        return su_typ
    else
        local typ, key = get_fn(lst, item)
        if not typ then
            return nil, key
        end
        return typ
    end
end

local calculate

local function binop(val, fn)
    local e1, e2 = calculate(val[1]), calculate(val[2])
    if type(e1) == "number" and type(e2) == "number" then
        return fn(e1, e2)
    else
        return { e1, e2, op = val.op }
    end
end

calculate = function (val)
    if type(val) == "string" then
        return tonumber(val)
    end
    if val.op == "+" then
        return binop(val, function (a, b) return a + b end)
    elseif val.op == "-" then
        return binop(val, function (a, b) return a - b end)
    elseif val.op == "*" then
        return binop(val, function (a, b) return a * b end)
    elseif val.op == "/" then
        return binop(val, function (a, b) return a / b end)
    else
        return val
    end
end
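
-- Constant folding for size expressions, e.g. (illustrative):
--
--   calculate("8")                     --> 8
--   calculate({ op = "*", "8", "4" })  --> 32
--
-- Operands that do not reduce to numbers are kept in a table tagged with
-- the original operator.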

local base_types = {
    ["char"] = true,
    ["const"] = true,
    ["bool"] = true,
    ["double"] = true,
    ["float"] = true,
    ["int"] = true,
    ["long"] = true,
    ["short"] = true,
    ["signed"] = true,
    ["__signed"] = true,
    ["__signed__"] = true,
    ["unsigned"] = true,
    ["void"] = true,
    ["volatile"] = true,
    ["ptrdiff_t"] = true,
    ["size_t"] = true,
    ["ssize_t"] = true,
    ["wchar_t"] = true,
    ["int8_t"] = true,
    ["int16_t"] = true,
    ["int32_t"] = true,
    ["int64_t"] = true,
    ["uint8_t"] = true,
    ["uint16_t"] = true,
    ["uint32_t"] = true,
    ["uint64_t"] = true,
    ["intptr_t"] = true,
    ["uintptr_t"] = true,
    ["__int8"] = true,
    ["__int16"] = true,
    ["__int32"] = true,
    ["__int64"] = true,
    ["_Bool"] = true,
    ["__ptr32"] = true,
    ["__ptr64"] = true,
    ["_Complex"] = true,
    ["complex"] = true,
    ["__complex"] = true,
    ["__complex__"] = true,
    ["*"] = true,
}

local qualifiers = {
    ["extern"] = true,
    ["static"] = true,
    ["typedef"] = true,
    ["restrict"] = true,
    ["inline"] = true,
    ["register"] = true,
}

get_type = function (lst, spec, ret_pointer)
    local tarr = {}
    if type(spec.type) == "string" then
        spec.type = { spec.type }
    end
    if spec.type and not spec.type[1] then
        spec.type = { spec.type }
    end
    for _, part in ipairs(spec.type or spec) do
        if qualifiers[part] then
            -- skip
        elseif base_types[part] then
            table.insert(tarr, part)
        elseif lst[part] and lst[part].type == "typedef" then
            table.insert(tarr, part)
        elseif type(part) == "table" and (part.type == "struct" or part.type == "union") then
            local su_typ, err = refer(lst, part, get_structunion)
            if not su_typ then
                return nil, err or "failed to refer struct"
            end
            table.insert(tarr, su_typ)
        elseif type(part) == "table" and part.type == "enum" then
            local en_typ, err = refer(lst, part, get_enum)
            if not en_typ then
                return nil, err or "failed to refer enum"
            end
            table.insert(tarr, en_typ)
        else
            return nil, "FIXME unknown type " .. inspect(spec)
        end
    end
    if #ret_pointer > 0 then
        for _, item in ipairs(ret_pointer) do
            if type(item) == "table" and item.idx then
                table.insert(tarr, { idx = calculate(item.idx) })
            else
                table.insert(tarr, item)
            end
        end
    end
    return tarr, nil
end

local function is_void(param)
    return #param.type == 1 and param.type[1] == "void"
end

local get_params = typed("TypeList, array -> array, boolean", function (lst, params_src)
    local params = {}
    local vararg = false

    assert(not params_src.param)

    for _, param_src in ipairs(params_src) do
        if param_src == "..." then
            vararg = true
        else
            local param, err = convert_value(lst, param_src.param)
            if not param then
                return nil, err
            end
            if not is_void(param) then
                table.insert(params, param)
            end
        end
    end
    return params, vararg
end)

local register_many = function (register_item_fn, lst, ids, spec)
    for _, id in ipairs(ids) do
        local ok, err = register_item_fn(lst, id, spec)
        if not ok then
            return false, err
        end
    end
    return true, nil
end

local register_decl_item = function (lst, id, spec)
    local ok, name, ret_pointer, idxs = get_name(id.decl)
    if not ok then
        return false, name
    end
    assert(name)
    local ret_type, err = get_type(lst, spec, ret_pointer)
    if not ret_type then
        return false, err
    end
    local typ
    if id.decl.params then
        local params, vararg = get_params(lst, id.decl.params)
        if not params then
            return false, vararg
        end
        typ = typed.table("CType", {
            type = "function",
            name = name,
            idxs = idxs,
            ret = {
                type = ret_type,
            },
            params = params,
            vararg = vararg,
        })
    else
        typ = typed.table("CType", {
            type = ret_type,
            name = name,
            idxs = idxs,
        })
    end

    if lst[name] then
        if not equal_declarations(lst[name], typ) then
            return false,
                "inconsistent declaration for " .. name .. " - " .. inspect(lst[name]) .. " VERSUS " .. inspect(typ)
        end
    end
    add_type(lst, name, typ)

    return true, nil
end

local register_decls = function (lst, ids, spec)
    return register_many(register_decl_item, lst, ids, spec)
end

-- Convert a table produced by an `extern inline` declaration
-- into one compatible with `register_decl`.
local function register_function(lst, item)
    local id = {
        decl = {
            name = item.func.name,
            params = item.func.params,
        }
    }
    return register_decl_item(lst, id, item.spec)
end

local function register_static_function(lst, item)
    return true
end

local register_typedef_item = typed("TypeList, table, table -> boolean, string?", function (lst, id, spec)
    local ok, name, ret_pointer = get_name(id.decl)
    if not ok then
        return false, name or "failed"
    end
    local def, err = get_type(lst, spec, ret_pointer)
    if not def then
        return false, err or "failed"
    end
    local typ = typed.table("CType", {
        type = "typedef",
        name = name,
        def = def,
    })

    if lst[name] then
        if not equal_declarations(lst[name], typ) then
            return false,
                "inconsistent declaration for " .. name .. " - " .. inspect(lst[name]) .. " VERSUS " .. inspect(typ)
        end
    end
    add_type(lst, name, typ)

    return true, nil
end)

local register_typedefs = function (lst, item)
    return register_many(register_typedef_item, lst, item.ids, item.spec)
end

local function register_structunion(lst, item)
    return get_structunion(lst, item.spec)
end

local function register_enum(lst, item)
    return get_enum(lst, item.spec)
end

local function to_set(array)
    local set = {}
    for _, v in ipairs(array) do
        set[v] = true
    end
    return set
end
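
-- e.g. to_set({ "extern", "const" }) --> { extern = true, const = true };
-- used below to test which storage-class keywords appear in item.spec.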

ctypes.register_types = typed("{Decl} -> TypeList?, string?", function (parsed)
    local lst = typed.table("TypeList", {})
    for _, item in ipairs(parsed) do
        typed.check(item.spec, "table")
        local spec_set = to_set(item.spec)
        if spec_set.extern and item.ids then
            local ok, err = register_decls(lst, item.ids, item.spec)
            if not ok then
                return nil, err or "failed extern"
            end
        elseif spec_set.extern and item.func then
            local ok, err = register_function(lst, item)
            if not ok then
                return nil, err or "failed extern"
            end
        elseif spec_set.static and item.func then
            local ok, err = register_static_function(lst, item)
            if not ok then
                return nil, err or "failed static function"
            end
        elseif spec_set.typedef then
            local ok, err = register_typedefs(lst, item)
            if not ok then
                return nil, err or "failed typedef"
            end
        else
            local expandSingle <const> = {
                ["struct"] = true,
                ["union"] = true,
                ["enum"] = true,
            }
            local spec = util.expandSingle(item.spec)
            if expandSingle[spec.type] then
                item.spec = spec
            end
            if item.spec.type == "struct" or item.spec.type == "union" then
                local ok, err = register_structunion(lst, item)
                if not ok then
                    return nil, err or "failed struct/union"
                end
            elseif item.spec.type == "enum" then
                local ok, err = register_enum(lst, item)
                if not ok then
                    return nil, err or "failed enum"
                end
            elseif not item.ids then
                -- forward declaration (e.g. "struct foo;")
            elseif item.ids then
                local ok, err = register_decls(lst, item.ids, item.spec)
                if not ok then
                    return nil, err or "failed declaration"
                end
            else
                return nil, "FIXME Uncategorized declaration: " .. inspect(item)
            end
        end
    end
    return lst, nil
end)
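
-- Typical use (a sketch, not from the original sources): feed the Decl list
-- produced by the C99 grammar into register_types and walk the resulting
-- TypeList:
--
--   local types, err = ctypes.register_types(parsed_decls)
--   if types then
--       for _, entry in ipairs(types) do
--           print(entry.name, entry.type.type)
--       end
--   end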

return ctypes
@@ -0,0 +1,172 @@
--------------------------------------------------------------------------------
-- Lua programming with types
--------------------------------------------------------------------------------

local _, inspect = pcall(require, "inspect")
inspect = inspect or tostring

local typed = {}

local FAST = false

local function is_sequence(xs)
    if type(xs) ~= "table" then
        return false
    end
    if FAST then
        return true
    end
    local l = #xs
    for k, _ in pairs(xs) do
        if type(k) ~= "number" or k < 1 or k > l or math.floor(k) ~= k then
            return false
        end
    end
    return true
end

local function type_of(t)
    local mt = getmetatable(t)
    return (mt and mt.__name) or (is_sequence(t) and "array") or type(t)
end

local function set_type(t, typ)
    local mt = getmetatable(t)
    if not mt then
        mt = {}
    end
    mt.__name = typ
    return setmetatable(t, mt)
end

local function typed_table(typ, t)
    return set_type(t, typ)
end

local function try_check(val, expected)
    local optional = expected:match("^(.*)%?$")
    if optional then
        if val == nil then
            return true
        end
        expected = optional
    end

    local seq_type = expected:match("^{(.+)}$")
    if seq_type then
        if type(val) == "table" then
            if FAST then
                return true
            end
            local allok = true
            for _, v in ipairs(val) do
                local ok = try_check(v, seq_type)
                if not ok then
                    allok = false
                    break
                end
            end
            if allok then
                return true
            end
        end
    end

    -- if all we want is a table, don't perform further checks
    if expected == "table" and type(val) == "table" then
        return true
    end

    local actual = type_of(val)
    if actual == expected then
        return true
    end
    return nil, actual
end

local function typed_check(val, expected, category, n)
    local ok, actual = try_check(val, expected)
    if ok then
        return true
    end
    if category and n then
        error(("type error: %s %d: expected %s, got %s (%s)"):format(category, n, expected, actual, inspect(val)), category == "value" and 2 or 3)
    else
        error(("type error: expected %s, got %s (%s)"):format(expected, actual, inspect(val)), 2)
    end
end

local function split(s, sep)
    local i, j, k = 1, s:find(sep, 1)
    local out = {}
    while j do
        table.insert(out, s:sub(i, j - 1))
        i = k + 1
        j, k = s:find(sep, i)
    end
    table.insert(out, s:sub(i, #s))
    return out
end
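
-- e.g. (illustrative) split("TypeList, string, CType", ",%s*")
-- --> { "TypeList", "string", "CType" }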

local function typed_function(types, fn)
    local inp, outp = types:match("(.*[^%s])%s*%->%s*([^%s].*)")
    local ins = split(inp, ",%s*")
    local outs = split(outp, ",%s*")
    return function(...)
        local args = table.pack(...)
        if args.n ~= #ins then
            error("wrong number of inputs (given " .. args.n .. " - expects " .. types .. ")", 2)
        end
        for i = 1, #ins do
            typed_check(args[i], ins[i], "argument", i)
        end
        local rets = table.pack(fn(...))
        if outp == "()" then
            if rets.n ~= 0 then
                error("wrong number of outputs (given " .. rets.n .. " - expects " .. types .. ")", 2)
            end
        else
            if rets.n ~= #outs then
                error("wrong number of outputs (given " .. rets.n .. " - expects " .. types .. ")", 2)
            end
            if outs[1] ~= "*" then
                for i = 1, #outs do
                    typed_check(rets[i], outs[i], "return", i)
                end
            end
        end
        return table.unpack(rets, 1, rets.n)
    end
end
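
-- Signature strings follow the convention used throughout the c-parser
-- modules; an illustrative (not original) example:
--
--   local concat = typed("string, string -> string", function (a, b)
--       return a .. b
--   end)
--   concat("foo", "bar")   -- ok
--   concat("foo", 42)      -- raises "type error: argument 2: ..."
--
-- Argument and return checks only run after typed.on() has been called.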

local typed_mt_on = {
    __call = function(_, types, fn)
        return typed_function(types, fn)
    end
}

local typed_mt_off = {
    __call = function(_, _, fn)
        return fn
    end
}

function typed.on()
    typed.check = typed_check
    typed.typed = typed_function
    typed.set_type = set_type
    typed.table = typed_table
    setmetatable(typed, typed_mt_on)
end

function typed.off()
    typed.check = function() end
    typed.typed = function(_, fn) return fn end
    typed.set_type = function(t, _) return t end
    typed.table = function(_, t) return t end
    setmetatable(typed, typed_mt_off)
end

typed.off()
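
-- Checks are disabled by default; callers that want runtime signature
-- checking (e.g. while debugging the parser) can call typed.on() after
-- requiring this module.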

return typed
@@ -0,0 +1,28 @@
local m = {}

local function tableLenEqual(t, len)
    for _key, _value in pairs(t) do
        len = len - 1
        if len < 0 then
            return false
        end
    end
    return true
end

local function isSingleNode(ast)
    if type(ast) ~= 'table' then
        return false
    end
    local len = #ast
    return len == 1 and tableLenEqual(ast, len)
end

function m.expandSingle(ast)
    if isSingleNode(ast) then
        return ast[1]
    end
    return ast
end
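
-- e.g. (illustrative):
--   m.expandSingle({ node })          --> node
--   m.expandSingle({ node, other })   --> { node, other }  (unchanged)
--   m.expandSingle({ node, k = 1 })   --> unchanged (extra keys present)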

return m