Mirror of https://github.com/eclipse-cdt/cdt (synced 2025-04-29 19:45:01 +02:00)
More testcases and fixes for the preprocessor.
commit 9527ea7033 (parent 6141cbb61d)
15 changed files with 500 additions and 249 deletions
@@ -278,6 +278,24 @@ public class LexerTests extends BaseTestCase {
 eof();
 }

+public void testMinimalComment() throws Exception {
+init("a/**/b/**/");
+id("a");
+comment("/**/");
+id("b");
+comment("/**/");
+eof();
+init("a//\nb//\r\nc");
+id("a");
+comment("//");
+nl();
+id("b");
+comment("//");
+nl();
+id("c");
+eof();
+}
+
 public void testHeaderName() throws Exception {
 init("p\"'/*//\\\"");
 fLexer.setInsideIncludeDirective(true);
@@ -316,7 +334,7 @@ public class LexerTests extends BaseTestCase {
 init(ident, false, true);
 final int idxDollar = ident.indexOf('$');
 id(ident.substring(0, idxDollar));
-token(IToken.tOTHER_CHARACTER, "$");
+token(Lexer.tOTHER_CHARACTER, "$");
 id(ident.substring(idxDollar+1));
 }

@@ -430,13 +448,13 @@ public class LexerTests extends BaseTestCase {
 IToken.tLBRACE, IToken.tRBRACE, IToken.tPOUNDPOUND, IToken.tPOUND, IToken.tSEMI,
 IToken.tCOLON, IToken.tELLIPSIS, IToken.tQUESTION, IToken.tDOT, IToken.tCOLONCOLON, IToken.tDOT,
 IToken.tDOTSTAR, IToken.tPLUS, IToken.tMINUS, IToken.tSTAR, IToken.tDIV, IToken.tMOD,
-IToken.tXOR, IToken.tAMPER, IToken.tBITOR, IToken.tCOMPL, IToken.tASSIGN, IToken.tNOT,
+IToken.tXOR, IToken.tAMPER, IToken.tBITOR, IToken.tBITCOMPLEMENT, IToken.tASSIGN, IToken.tNOT,
 IToken.tLT, IToken.tGT, IToken.tPLUSASSIGN, IToken.tMINUSASSIGN, IToken.tSTARASSIGN,
 IToken.tDIVASSIGN, IToken.tMODASSIGN, IToken.tXORASSIGN, IToken.tAMPERASSIGN,
 IToken.tBITORASSIGN, IToken.tSHIFTL, IToken.tSHIFTR, IToken.tSHIFTLASSIGN,
 IToken.tSHIFTRASSIGN, IToken.tEQUAL, IToken.tNOTEQUAL, IToken.tLTEQUAL, IToken.tGTEQUAL,
 IToken.tAND, IToken.tOR, IToken.tINCR, IToken.tDECR, IToken.tCOMMA, IToken.tARROWSTAR,
-IToken.tARROW, IGCCToken.tMIN, IGCCToken.tMAX, IToken.tOTHER_CHARACTER,
+IToken.tARROW, IGCCToken.tMIN, IGCCToken.tMAX, Lexer.tOTHER_CHARACTER,
 };

 for (int splices=0; splices<9; splices++) {
@@ -543,4 +561,48 @@ public class LexerTests extends BaseTestCase {
 token(IToken.tOR);
 eof();
 }

+public void testNextDirective() throws Exception {
+init("#if \n /*\n#*/ \"#\" '#' \\\n# ??/\n# \n## \n#\\\n# \n#??/\n# \n#ok \r\n#");
+token(IToken.tPOUND);
+id("if");
+fLexer.consumeLine(0);
+assertEquals(Lexer.tNEWLINE, fLexer.currentToken().getType());
+fLexer.nextDirective();
+comment("/*\n#*/");
+token(IToken.tPOUND);
+id("ok");
+fLexer.nextDirective();
+ws();
+token(IToken.tPOUND);
+eof();
+
+init("#if \n??=??= \n#??= \n??=# \n??=\\\n??= \n#\\\n??= \n??=\\\n# \n??=ok \n??=");
+token(IToken.tPOUND);
+id("if");
+fLexer.consumeLine(0);
+assertEquals(Lexer.tNEWLINE, fLexer.currentToken().getType());
+fLexer.nextDirective();
+ws();
+token(IToken.tPOUND);
+id("ok");
+fLexer.nextDirective();
+ws();
+token(IToken.tPOUND);
+eof();
+
+init("#if \n%:%: \n%:\\\n%: \n%:??/\n%: \n%:ok \n%:");
+token(IToken.tPOUND);
+id("if");
+fLexer.consumeLine(0);
+assertEquals(Lexer.tNEWLINE, fLexer.currentToken().getType());
+fLexer.nextDirective();
+ws();
+token(IToken.tPOUND);
+id("ok");
+fLexer.nextDirective();
+ws();
+token(IToken.tPOUND);
+eof();
+}
 }

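The testNextDirective() inputs above spell '#' not only directly but also as the trigraph "??=" and the digraph "%:". As a quick reference (standard C behaviour, not part of this commit; the macro names are made up), each of the following lines begins the same kind of #define directive, because "??=" and "%:" are alternative spellings of '#', and "??=??=" and "%:%:" of "##". Trigraphs are only substituted when the compiler is told to honour them (for example with -trigraphs or an old language standard):

    #define CAT(a, b) a ## b        /* plain spelling */
    %:define CAT2(a, b) a %:%: b    /* digraphs, recognized as tokens */
    ??=define CAT3(a, b) a ??=??= b /* trigraphs, replaced in translation phase 1 */
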
@@ -299,7 +299,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorStatement[] prep= fLocationMap.getAllPreprocessorStatements();
 assertEquals(2, prep.length);
 checkError(prep[0], "", "",FN,0,0,1);
-checkError(prep[1], new String(DIGITS), "12", FN,0,16,1);
+checkError(prep[1], "012", "12", FN,0,3,1);
 }

 public void testPragma() {
@@ -309,7 +309,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorStatement[] prep= fLocationMap.getAllPreprocessorStatements();
 assertEquals(2, prep.length);
 checkPragma(prep[0], "", "", FN,0,0,1);
-checkPragma(prep[1], new String(DIGITS), "12", FN,0,16,1);
+checkPragma(prep[1], "012", "12", FN,0,3,1);
 }

 public void testIncludes() {
@@ -319,7 +319,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorIncludeStatement[] includes= fLocationMap.getIncludeDirectives();
 assertEquals(2, includes.length);
 checkInclude(includes[0], "", "", "n1", "", true, false, FN, 0, 0, 1, 0, 0);
-checkInclude(includes[1], new String(DIGITS), "12", "n2", "f2", false, true, FN, 0, 16, 1, 1, 2);
+checkInclude(includes[1], "012", "12", "n2", "f2", false, true, FN, 0, 3, 1, 1, 2);
 }

 public void testIf() {
@@ -329,7 +329,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorStatement[] prep= fLocationMap.getAllPreprocessorStatements();
 assertEquals(2, prep.length);
 checkIf(prep[0], "", "", false, FN, 0, 0, 1);
-checkIf(prep[1], new String(DIGITS), "12", true, FN, 0, 16, 1);
+checkIf(prep[1], "012", "12", true, FN, 0, 3, 1);
 }

 public void testIfdef() {
@@ -339,7 +339,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorStatement[] prep= fLocationMap.getAllPreprocessorStatements();
 assertEquals(2, prep.length);
 checkIfdef(prep[0], "", "", false, FN, 0, 0, 1);
-checkIfdef(prep[1], new String(DIGITS), "12", true, FN, 0, 16, 1);
+checkIfdef(prep[1], "012", "12", true, FN, 0, 3, 1);
 }

 public void testIfndef() {
@@ -349,7 +349,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorStatement[] prep= fLocationMap.getAllPreprocessorStatements();
 assertEquals(2, prep.length);
 checkIfndef(prep[0], "", "", false, FN, 0, 0, 1);
-checkIfndef(prep[1], new String(DIGITS), "12", true, FN, 0, 16, 1);
+checkIfndef(prep[1], "012", "12", true, FN, 0, 3, 1);
 }

 public void testElif() {
@@ -359,7 +359,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorStatement[] prep= fLocationMap.getAllPreprocessorStatements();
 assertEquals(2, prep.length);
 checkElif(prep[0], "", "", false, FN, 0, 0, 1);
-checkElif(prep[1], new String(DIGITS), "12", true, FN, 0, 16, 1);
+checkElif(prep[1], "012", "12", true, FN, 0, 3, 1);
 }

 public void testElse() {
@@ -430,7 +430,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorStatement[] prep= fLocationMap.getAllPreprocessorStatements();
 assertEquals(2, prep.length);
 checkMacroUndef(prep[0], null, "", "n1", "", FN, 0, 0, 1, 0, 0);
-checkMacroUndef(prep[1], macro1, new String(DIGITS), "n2", "3456", FN, 0, 16, 1, 3, 4);
+checkMacroUndef(prep[1], macro1, "0123456", "n2", "3456", FN, 0, 7, 1, 3, 4);
 }

 public void testMacroExpansion() {
@@ -551,7 +551,7 @@ public class LocationMapTests extends BaseTestCase {

 inclusions= inclusions[0].getNestedInclusions();
 assertEquals(1, inclusions.length);
-checkInclude(inclusions[0].getIncludeDirective(), "b4b", "4", "pre11", "pre11", false, true, "pre1", 6, 3, 1, 7, 1);
+checkInclude(inclusions[0].getIncludeDirective(), "b4", "4", "pre11", "pre11", false, true, "pre1", 6, 2, 1, 7, 1);
 assertEquals(0, inclusions[0].getNestedInclusions().length);
 }
 }

@@ -1763,7 +1763,7 @@ public class PortedScannerTests extends PreprocessorTestsBase {
 initializeScanner(writer.toString());
 fullyTokenize();
 IASTProblem[] problems= fLocationResolver.getScannerProblems();
-assertEquals(16, problems.length);
+assertEquals(17, problems.length);
 int i= 0;
 assertEquals(IProblem.SCANNER_BAD_OCTAL_FORMAT, problems[i].getID() );
 assertEquals(IProblem.SCANNER_BAD_DECIMAL_FORMAT, problems[++i].getID() );
@@ -1775,6 +1775,7 @@ public class PortedScannerTests extends PreprocessorTestsBase {
 assertEquals(IProblem.SCANNER_ILLEGAL_IDENTIFIER, problems[++i].getID() );
 assertEquals(IProblem.SCANNER_BAD_CONDITIONAL_EXPRESSION,problems[++i].getID() );
 assertEquals(IProblem.SCANNER_EXPRESSION_SYNTAX_ERROR, problems[++i].getID() );
+assertEquals(IProblem.SCANNER_BAD_CHARACTER, problems[++i].getID() );
 assertEquals(IProblem.SCANNER_EXPRESSION_SYNTAX_ERROR, problems[++i].getID() );
 assertEquals(IProblem.SCANNER_EXPRESSION_SYNTAX_ERROR, problems[++i].getID() );
 assertEquals(IProblem.SCANNER_EXPRESSION_SYNTAX_ERROR, problems[++i].getID() );

@@ -345,6 +345,7 @@ public class PreprocessorTests extends PreprocessorTestsBase {
 validateEOF();
 validateProblemCount(0);
 }

 // #define OBJ __VA_ARGS__
 // #define func(x) __VA_ARGS__
 // OBJ;
@@ -398,4 +399,94 @@ public class PreprocessorTests extends PreprocessorTestsBase {
 validateEOF();
 validateProblemCount(0);
 }

+// #define ONE(a, ...) int x
+// #define TWO(b, args...) int y
+// ONE("string");
+// TWO("string");
+public void testSkippingVarags() throws Exception {
+initializeScanner();
+validateToken(IToken.t_int);
+validateIdentifier("x");
+validateToken(IToken.tSEMI);
+
+validateToken(IToken.t_int);
+validateIdentifier("y");
+validateToken(IToken.tSEMI);
+
+validateEOF();
+validateProblemCount(0);
+}
+
+// #define eval(f,x) f(x)
+// #define m(x) m[x]
+// eval(m,y);
+public void testReconsiderArgsForExpansion() throws Exception {
+initializeScanner();
+validateIdentifier("m");
+validateToken(IToken.tLBRACKET);
+validateIdentifier("y");
+validateToken(IToken.tRBRACKET);
+validateToken(IToken.tSEMI);
+
+validateEOF();
+validateProblemCount(0);
+}
+
+//#define f\
+//(x) ok
+// f(x)
+public void testLineSpliceInMacroDefinition() throws Exception {
+initializeScanner();
+validateIdentifier("ok");
+validateEOF();
+validateProblemCount(0);
+}
+
+// #define f() fval
+// #define nospace f()f()
+// #define space f() f()
+// #define str(x) #x
+// #define xstr(x) str(x)
+// #define tp1(x,y,z) [x ## y ## z]
+// #define tp2(x,y,z) [ x ## y ## z ]
+// #define tstr1(x,y) [#x#y]
+// #define tstr2(x,y) [ #x #y ]
+// xstr(nospace);
+// xstr(space);
+// xstr(tp1(a b, c d , e f));
+// xstr(tp2(a b, c d , e f));
+// xstr(tp1(a-b, c-d , e-f));
+// xstr(tp2(a-b, c-d , e-f));
+// xstr(tstr1(a b, c d));
+// xstr(tstr2(a b, c d));
+public void testSpaceInStringify() throws Exception {
+initializeScanner();
+validateString("fvalfval");
+validateToken(IToken.tSEMI);
+
+validateString("fval fval");
+validateToken(IToken.tSEMI);
+
+validateString("[a bc de f]");
+validateToken(IToken.tSEMI);
+
+validateString("[ a bc de f ]");
+validateToken(IToken.tSEMI);
+
+validateString("[a-bc-de-f]");
+validateToken(IToken.tSEMI);
+
+validateString("[ a-bc-de-f ]");
+validateToken(IToken.tSEMI);
+
+validateString("[\\\"a b\\\"\\\"c d\\\"]");
+validateToken(IToken.tSEMI);
+
+validateString("[ \\\"a b\\\" \\\"c d\\\" ]");
+validateToken(IToken.tSEMI);
+
+validateEOF();
+validateProblemCount(0);
+}
 }

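The new testReconsiderArgsForExpansion() relies on the standard rescanning rule: after the arguments of a function-like macro have been substituted, the resulting tokens are rescanned together with what follows, so a macro name passed as an argument can still be invoked if the expansion places a '(' right after it. A minimal stand-alone C sketch of the same rule (illustration only, not taken from the commit; the array, the demo() helper and the variable names are made up):

    #define eval(f, x) f(x)
    #define m(x) m[x]

    int m[10];

    int *demo(int y) {
        /* eval(m, y) first becomes m(y); on rescanning, the function-like macro m
           is applied again, so eval(m, y) ends up as m[y] -- exactly the token
           sequence 'm' '[' 'y' ']' that the test above checks. */
        return &eval(m, y);
    }
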
@@ -6,7 +6,8 @@
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 * IBM Rational Software - Initial API and implementation
+* Markus Schorn (Wind River Systems)
 *******************************************************************************/
 package org.eclipse.cdt.core.parser;

@@ -118,7 +119,6 @@ public interface IToken {
 static public final int tDOT = 50;
 static public final int tDIVASSIGN = 51;
 static public final int tDIV = 52;
-static public final int tOTHER_CHARACTER= 53;

 /** @deprecated use {@link #tAND} */
 static public final int t_and = 54;

@@ -29,6 +29,7 @@ import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIfdefStatement;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIfndefStatement;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIncludeStatement;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
+import org.eclipse.cdt.core.dom.ast.IASTPreprocessorObjectStyleMacroDefinition;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorPragmaStatement;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorUndefStatement;
 import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
@@ -125,12 +126,10 @@ class ASTComment extends ASTPreprocessorNode implements IASTComment {

 abstract class ASTDirectiveWithCondition extends ASTPreprocessorNode {
 private final int fConditionOffset;
-private final int fConditionLength;
 private final boolean fActive;
-public ASTDirectiveWithCondition(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
+public ASTDirectiveWithCondition(IASTTranslationUnit parent, int startNumber, int condNumber, int endNumber, boolean active) {
 super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
 fConditionOffset= condNumber;
-fConditionLength= condEndNumber-condNumber;
 fActive= active;
 }

@@ -139,7 +138,7 @@ abstract class ASTDirectiveWithCondition extends ASTPreprocessorNode {
 }

 public String getConditionString() {
-return getSource(fConditionOffset, fConditionLength);
+return getSource(fConditionOffset, getOffset() + getLength() - fConditionOffset);
 }

 public char[] getCondition() {
@@ -154,8 +153,8 @@ class ASTEndif extends ASTPreprocessorNode implements IASTPreprocessorEndifState
 }

 class ASTElif extends ASTDirectiveWithCondition implements IASTPreprocessorElifStatement {
-public ASTElif(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
+public ASTElif(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, boolean active) {
-super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
+super(parent, startNumber, condNumber, condEndNumber, active);
 }
 }

@@ -171,26 +170,26 @@ class ASTElse extends ASTPreprocessorNode implements IASTPreprocessorElseStateme
 }

 class ASTIfndef extends ASTDirectiveWithCondition implements IASTPreprocessorIfndefStatement {
-public ASTIfndef(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
+public ASTIfndef(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, boolean active) {
-super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
+super(parent, startNumber, condNumber, condEndNumber, active);
 }
 }

 class ASTIfdef extends ASTDirectiveWithCondition implements IASTPreprocessorIfdefStatement {
-public ASTIfdef(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
+public ASTIfdef(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, boolean active) {
-super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
+super(parent, startNumber, condNumber, condEndNumber, active);
 }
 }

 class ASTIf extends ASTDirectiveWithCondition implements IASTPreprocessorIfStatement {
-public ASTIf(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
+public ASTIf(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, boolean active) {
-super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
+super(parent, startNumber, condNumber, condEndNumber, active);
 }
 }

 class ASTError extends ASTDirectiveWithCondition implements IASTPreprocessorErrorStatement {
-public ASTError(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber) {
+public ASTError(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber) {
-super(parent, startNumber, condNumber, condEndNumber, endNumber, true);
+super(parent, startNumber, condNumber, condEndNumber, true);
 }

 public char[] getMessage() {
@@ -199,8 +198,8 @@ class ASTError extends ASTDirectiveWithCondition implements IASTPreprocessorErro
 }

 class ASTPragma extends ASTDirectiveWithCondition implements IASTPreprocessorPragmaStatement {
-public ASTPragma(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber) {
+public ASTPragma(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber) {
-super(parent, startNumber, condNumber, condEndNumber, endNumber, true);
+super(parent, startNumber, condNumber, condEndNumber, true);
 }

 public char[] getMessage() {
@@ -215,9 +214,9 @@ class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreproces
 private final boolean fIsResolved;
 private final boolean fIsSystemInclude;

-public ASTInclusionStatement(IASTTranslationUnit parent, int startNumber, int nameStartNumber, int nameEndNumber, int endNumber,
+public ASTInclusionStatement(IASTTranslationUnit parent, int startNumber, int nameStartNumber, int nameEndNumber,
 char[] headerName, String filePath, boolean userInclude, boolean active) {
-super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
+super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, nameEndNumber);
 fName= new ASTPreprocessorName(this, IASTPreprocessorIncludeStatement.INCLUDE_NAME, nameStartNumber, nameEndNumber, headerName, null);
 fPath= filePath == null ? "" : filePath; //$NON-NLS-1$
 fIsActive= active;
@@ -246,7 +245,7 @@ class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreproces
 }
 }

-class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorMacroDefinition {
+class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorObjectStyleMacroDefinition {
 private final ASTPreprocessorName fName;

 /**
@@ -335,8 +334,8 @@ class ASTFunctionMacro extends ASTMacro implements IASTPreprocessorFunctionStyle

 class ASTUndef extends ASTPreprocessorNode implements IASTPreprocessorUndefStatement {
 private final IASTName fName;
-public ASTUndef(IASTTranslationUnit parent, char[] name, int startNumber, int nameNumber, int nameEndNumber, int endNumber, IBinding binding) {
+public ASTUndef(IASTTranslationUnit parent, char[] name, int startNumber, int nameNumber, int nameEndNumber, IBinding binding) {
-super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
+super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, nameEndNumber);
 fName= new ASTPreprocessorName(this, IASTPreprocessorUndefStatement.MACRO_NAME, nameNumber, nameEndNumber, name, binding);
 }

@@ -51,13 +51,24 @@ import org.eclipse.cdt.internal.core.parser.scanner2.ScannerUtility;
 * you should be using the {@link IScanner} interface.
 * @since 5.0
 */
+/**
+* @since 5.0
+*
+*/
+/**
+* @since 5.0
+*
+*/
 public class CPreprocessor implements ILexerLog, IScanner {
+public static final String PROP_VALUE = "CPreprocessor"; //$NON-NLS-1$
+
 public static final int tDEFINED= IToken.FIRST_RESERVED_PREPROCESSOR;
 public static final int tEXPANDED_IDENTIFIER= IToken.FIRST_RESERVED_PREPROCESSOR+1;
 public static final int tSCOPE_MARKER= IToken.FIRST_RESERVED_PREPROCESSOR+2;
 public static final int tSPACE= IToken.FIRST_RESERVED_PREPROCESSOR+3;
-public static final int tMACRO_PARAMETER= IToken.FIRST_RESERVED_PREPROCESSOR+4;
+public static final int tNOSPACE= IToken.FIRST_RESERVED_PREPROCESSOR+4;
-public static final int tEMPTY_TOKEN = IToken.FIRST_RESERVED_PREPROCESSOR+5;
+public static final int tMACRO_PARAMETER= IToken.FIRST_RESERVED_PREPROCESSOR+5;
+public static final int tEMPTY_TOKEN = IToken.FIRST_RESERVED_PREPROCESSOR+6;

@@ -418,9 +429,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
 Token t1= fPrefetchedToken;
 if (t1 == null) {
 t1= fetchTokenFromPreprocessor();
-final int offset= fLocationMap.getSequenceNumberForOffset(t1.getOffset());
+adjustOffsets(t1);
-final int endOffset= fLocationMap.getSequenceNumberForOffset(t1.getEndOffset());
-t1.setOffset(offset, endOffset);
 }
 else {
 fPrefetchedToken= null;
@@ -442,6 +451,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
 int endOffset= 0;
 loop: while(true) {
 t2= fetchTokenFromPreprocessor();
+adjustOffsets(t2);
 final int tt2= t2.getType();
 switch(tt2) {
 case IToken.tLSTRING:
@@ -481,6 +491,13 @@ public class CPreprocessor implements ILexerLog, IScanner {
 return t1;
 }

+private void adjustOffsets(Token t1) {
+final int offset= fLocationMap.getSequenceNumberForOffset(t1.getOffset());
+final int endOffset= fLocationMap.getSequenceNumberForOffset(t1.getEndOffset());
+t1.setOffset(offset, endOffset);
+t1.setNext(null);
+}
+
 private void appendStringContent(StringBuffer buf, Token t1) {
 final char[] image= t1.getCharImage();
 final int start= image[0]=='"' ? 1 : 2;
@@ -509,6 +526,15 @@ public class CPreprocessor implements ILexerLog, IScanner {
 ppToken= fCurrentContext.nextPPToken();
 continue;

+case Lexer.tOTHER_CHARACTER:
+if (!fExpandingMacro) {
+handleProblem(IProblem.SCANNER_BAD_CHARACTER, ppToken.getCharImage(),
+ppToken.getOffset(), ppToken.getEndOffset());
+ppToken= fCurrentContext.nextPPToken();
+continue;
+}
+break;
+
 case Lexer.tEND_OF_INPUT:
 final ILocationCtx locationCtx = fCurrentContext.getLocationCtx();
 if (locationCtx != null) {
@@ -549,7 +575,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
 return ppToken;

 case IToken.tINTEGER:
-if (fCheckNumbers) {
+if (fCheckNumbers && !fExpandingMacro) {
 checkNumber(ppToken, false);
 }
 break;
@@ -878,7 +904,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
 condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
 endOffset= lexer.currentToken().getEndOffset();
 if (fCurrentContext.changeBranch(ScannerContext.BRANCH_END)) {
-fLocationMap.encounterPoundEndIf(startOffset, endOffset);
+fLocationMap.encounterPoundEndIf(startOffset, condEndOffset);
 }
 else {
 handleProblem(IProblem.PREPROCESSOR_UNBALANCE_CONDITION, name, startOffset, endOffset);
@@ -889,11 +915,11 @@ public class CPreprocessor implements ILexerLog, IScanner {
 condOffset= lexer.nextToken().getOffset();
 condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
 endOffset= lexer.currentToken().getEndOffset();
-final char[] warning= lexer.getInputChars(condOffset, endOffset);
+final char[] warning= lexer.getInputChars(condOffset, condEndOffset);
 final int id= type == IPreprocessorDirective.ppError
 ? IProblem.PREPROCESSOR_POUND_ERROR
 : IProblem.PREPROCESSOR_POUND_WARNING;
-handleProblem(id, warning, startOffset, endOffset);
+handleProblem(id, warning, condOffset, condEndOffset);
 fLocationMap.encounterPoundError(startOffset, condOffset, condEndOffset, endOffset);
 break;
 case IPreprocessorDirective.ppPragma:
@@ -1050,12 +1076,12 @@ public class CPreprocessor implements ILexerLog, IScanner {
 ObjectStyleMacro macrodef = fMacroDefinitionParser.parseMacroDefinition(lexer, this);
 fMacroDictionary.put(macrodef.getNameCharArray(), macrodef);
 final Token name= fMacroDefinitionParser.getNameToken();
-final int endOffset= lexer.currentToken().getEndOffset();
 fLocationMap.encounterPoundDefine(startOffset, name.getOffset(), name.getEndOffset(),
-fMacroDefinitionParser.getExpansionOffset(), endOffset, macrodef);
+macrodef.getExpansionOffset(), macrodef.getExpansionEndOffset(), macrodef);
 } catch (InvalidMacroDefinitionException e) {
-int end= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
+lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
-handleProblem(IProblem.PREPROCESSOR_INVALID_MACRO_DEFN, e.fName, startOffset, end);
+handleProblem(IProblem.PREPROCESSOR_INVALID_MACRO_DEFN, e.fName, e.fStartOffset, e.fEndOffset);
 }
 }

@@ -1349,10 +1375,9 @@ public class CPreprocessor implements ILexerLog, IScanner {
 throw new UnsupportedOperationException();
 }
 public org.eclipse.cdt.internal.core.parser.scanner2.ILocationResolver getLocationResolver() {
-throw new UnsupportedOperationException();
+return fLocationMap;
 }
 public void setOffsetBoundary(int offset) {
 throw new UnsupportedOperationException();
 }

 }

@@ -6,11 +6,12 @@
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 * IBM - Initial API and implementation
 * Markus Schorn (Wind River Systems)
 *******************************************************************************/
 package org.eclipse.cdt.internal.core.parser.scanner;

+import org.eclipse.cdt.core.dom.ast.IASTComment;
 import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIncludeStatement;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
@@ -83,4 +84,9 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
 * Returns the definition for a macro.
 */
 public IASTName[] getDeclarations(IMacroBinding binding);

+/**
+* Returns the comments encountered.
+*/
+IASTComment[] getComments();
 }

@@ -41,6 +41,7 @@ final public class Lexer {
 public static final int tEND_OF_INPUT = IToken.FIRST_RESERVED_SCANNER + 2;
 public static final int tQUOTE_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 3;
 public static final int tSYSTEM_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 4;
+public static final int tOTHER_CHARACTER = IToken.FIRST_RESERVED_SCANNER + 5;

 private static final int END_OF_INPUT = -1;
 private static final int ORIGIN_LEXER = OffsetLimitReachedException.ORIGIN_LEXER;
@@ -176,33 +177,6 @@ final public class Lexer {
 }
 }

-/**
-* Advances to the next newline.
-* @return the list of tokens found on this line.
-* @param origin parameter for the {@link OffsetLimitReachedException} when it has to be thrown.
-*/
-public final void getTokensOfLine(int origin, TokenList result) throws OffsetLimitReachedException {
-Token t= fToken;
-while(true) {
-switch(t.getType()) {
-case IToken.tCOMPLETION:
-fToken= t;
-throw new OffsetLimitReachedException(origin, t);
-case Lexer.tEND_OF_INPUT:
-fToken= t;
-if (fOptions.fSupportContentAssist) {
-throw new OffsetLimitReachedException(origin, null);
-}
-return;
-case Lexer.tNEWLINE:
-fToken= t;
-return;
-}
-result.append(t);
-t= fetchToken();
-}
-}
-
 /**
 * Advances to the next pound token that starts a preprocessor directive.
 * @return pound token of the directive or end-of-input.
@@ -273,18 +247,34 @@ final public class Lexer {
 lineComment(start);
 continue;
 case '*':
-nextCharPhase3();
 blockComment(start);
 continue;
 }
 continue;

-case '#':
+case '%':
-if (d == '#') {
-nextCharPhase3();
-continue;
-}
 if (hadNL) {
+if (d == ':') {
+// found at least '#'
+final int e= nextCharPhase3();
+if (e == '%') {
+markPhase3();
+if (nextCharPhase3() == ':') {
+// found '##'
+nextCharPhase3();
+continue;
+}
+restorePhase3();
+}
+fFirstTokenAfterNewline= true;
+fToken= newDigraphToken(IToken.tPOUND, start);
+return fToken;
+}
+}
+continue;
+
+case '#':
+if (hadNL && d != '#') {
 fFirstTokenAfterNewline= true;
 fToken= newToken(IToken.tPOUND, start);
 return fToken;
@@ -359,7 +349,7 @@ final public class Lexer {
 nextCharPhase3();
 return identifier(start, 2);
 }
-return newToken(IToken.tOTHER_CHARACTER, start, 1);
+return newToken(tOTHER_CHARACTER, start, 1);

 case '0': case '1': case '2': case '3': case '4':
 case '5': case '6': case '7': case '8': case '9':
@@ -470,7 +460,6 @@ final public class Lexer {
 lineComment(start);
 continue;
 case '*':
-nextCharPhase3();
 blockComment(start);
 continue;
 }
@@ -607,7 +596,7 @@ final public class Lexer {
 break;
 }
 // handles for instance @
-return newToken(IToken.tOTHER_CHARACTER, start, 1);
+return newToken(tOTHER_CHARACTER, start, 1);
 }
 }

@@ -118,7 +118,7 @@ public class LocationMap implements ILocationResolver {
 int nameEndNumber= getSequenceNumberForOffset(nameEndOffset);
 int endNumber= getSequenceNumberForOffset(endOffset);
 final ASTInclusionStatement inclusionStatement=
-new ASTInclusionStatement(fTranslationUnit, startNumber, nameNumber, nameEndNumber, endNumber, name, filename, userInclude, true);
+new ASTInclusionStatement(fTranslationUnit, startNumber, nameNumber, nameEndNumber, name, filename, userInclude, true);
 fDirectives.add(inclusionStatement);
 fCurrentContext= new FileLocationCtx((ContainerLocationCtx) fCurrentContext, filename, buffer, startOffset, endOffset, endNumber, inclusionStatement);
 fLastChildInsertionOffset= 0;
@@ -203,11 +203,11 @@ public class LocationMap implements ILocationResolver {
 */
 public void encounterPoundInclude(int startOffset, int nameOffset, int nameEndOffset, int endOffset,
 char[] name, String filename, boolean userInclude, boolean active) {
-startOffset= getSequenceNumberForOffset(startOffset); // there may be a macro expansion
+startOffset= getSequenceNumberForOffset(startOffset);
-nameOffset= getSequenceNumberForOffset(nameOffset); // there may be a macro expansion
+nameOffset= getSequenceNumberForOffset(nameOffset);
 nameEndOffset= getSequenceNumberForOffset(nameEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTInclusionStatement(fTranslationUnit, startOffset, nameOffset, nameEndOffset, endOffset, name, filename, userInclude, active));
+fDirectives.add(new ASTInclusionStatement(fTranslationUnit, startOffset, nameOffset, nameEndOffset, name, filename, userInclude, active));
 }

 public void encounteredComment(int offset, int endOffset, boolean isBlockComment) {
@@ -230,11 +230,11 @@ public class LocationMap implements ILocationResolver {
 }

 public void encounterPoundElif(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
-startOffset= getSequenceNumberForOffset(startOffset); // there may be a macro expansion
+startOffset= getSequenceNumberForOffset(startOffset);
-condOffset= getSequenceNumberForOffset(condOffset); // there may be a macro expansion
+condOffset= getSequenceNumberForOffset(condOffset);
 condEndOffset= getSequenceNumberForOffset(condEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTElif(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
+fDirectives.add(new ASTElif(fTranslationUnit, startOffset, condOffset, condEndOffset, isActive));
 }

 public void encounterPoundEndIf(int startOffset, int endOffset) {
@@ -247,40 +247,40 @@ public class LocationMap implements ILocationResolver {
 startOffset= getSequenceNumberForOffset(startOffset);
 condOffset= getSequenceNumberForOffset(condOffset);
 condEndOffset= getSequenceNumberForOffset(condEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTError(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset));
+fDirectives.add(new ASTError(fTranslationUnit, startOffset, condOffset, condEndOffset));
 }

 public void encounterPoundPragma(int startOffset, int condOffset, int condEndOffset, int endOffset) {
 startOffset= getSequenceNumberForOffset(startOffset);
 condOffset= getSequenceNumberForOffset(condOffset);
 condEndOffset= getSequenceNumberForOffset(condEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTPragma(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset));
+fDirectives.add(new ASTPragma(fTranslationUnit, startOffset, condOffset, condEndOffset));
 }

 public void encounterPoundIfdef(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
 startOffset= getSequenceNumberForOffset(startOffset);
 condOffset= getSequenceNumberForOffset(condOffset);
 condEndOffset= getSequenceNumberForOffset(condEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTIfdef(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
+fDirectives.add(new ASTIfdef(fTranslationUnit, startOffset, condOffset, condEndOffset, isActive));
 }

 public void encounterPoundIfndef(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
 startOffset= getSequenceNumberForOffset(startOffset);
 condOffset= getSequenceNumberForOffset(condOffset);
 condEndOffset= getSequenceNumberForOffset(condEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTIfndef(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
+fDirectives.add(new ASTIfndef(fTranslationUnit, startOffset, condOffset, condEndOffset, isActive));
 }

 public void encounterPoundIf(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
-startOffset= getSequenceNumberForOffset(startOffset); // there may be a macro expansion
+startOffset= getSequenceNumberForOffset(startOffset);
-condOffset= getSequenceNumberForOffset(condOffset); // there may be a macro expansion
+condOffset= getSequenceNumberForOffset(condOffset);
 condEndOffset= getSequenceNumberForOffset(condEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTIf(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
+fDirectives.add(new ASTIf(fTranslationUnit, startOffset, condOffset, condEndOffset, isActive));
 }

 public void encounterPoundDefine(int startOffset, int nameOffset, int nameEndOffset, int expansionOffset, int endOffset, IMacroBinding macrodef) {
@@ -303,8 +303,8 @@ public class LocationMap implements ILocationResolver {
 startOffset= getSequenceNumberForOffset(startOffset);
 nameOffset= getSequenceNumberForOffset(nameOffset);
 nameEndOffset= getSequenceNumberForOffset(nameEndOffset);
-endOffset= getSequenceNumberForOffset(endOffset);
+// endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTUndef(fTranslationUnit, name, startOffset, nameOffset, nameEndOffset, endOffset, definition));
+fDirectives.add(new ASTUndef(fTranslationUnit, name, startOffset, nameOffset, nameEndOffset, definition));
 }

 public void setRootNode(IASTTranslationUnit root) {
@@ -415,13 +415,14 @@ public class LocationMap implements ILocationResolver {
 return new DependencyTree(fRootContext);
 }

+public void cleanup() {
+}
+
 // stuff to remove from ILocationResolver
 public IASTName[] getMacroExpansions() {
 throw new UnsupportedOperationException();
 }
-public void cleanup() {
-throw new UnsupportedOperationException();
-}
 // mstodo- locations
 public IASTFileLocation flattenLocations(IASTNodeLocation[] locations) {
 if (locations.length != 1 || !(locations[0] instanceof IASTFileLocation)) {

@ -25,8 +25,12 @@ import org.eclipse.cdt.core.parser.util.CharArrayUtils;
class MacroDefinitionParser {
static class InvalidMacroDefinitionException extends Exception {
public char[] fName;
public InvalidMacroDefinitionException(char[] name) {
public int fStartOffset;
public int fEndOffset;
public InvalidMacroDefinitionException(char[] name, int startOffset, int endOffset) {
fName= name;
fStartOffset= startOffset;
fEndOffset= endOffset;
}
}

@ -45,14 +49,6 @@ class MacroDefinitionParser {
return fNameToken;
}

/**
* In case the expansion was successfully parsed, the start offset is returned.
* Otherwise the return value is undefined.
*/
public int getExpansionOffset() {
return fExpansionOffset;
}

/**
* Parses an entire macro definition. Name must be the next token of the lexer.
*/
@ -81,7 +77,7 @@ class MacroDefinitionParser {
final char[][] paramList= parseParamList(lexer, name);
final Token replacementToken = lexer.currentToken();
if (replacementToken.getType() != Lexer.tEND_OF_INPUT) {
throw new InvalidMacroDefinitionException(nameChars);
throw new InvalidMacroDefinitionException(nameChars, replacementToken.getOffset(), replacementToken.getEndOffset());
}

if (paramList == null) {
@ -136,7 +132,7 @@ class MacroDefinitionParser {
if (tt == IToken.tCOMPLETION) {
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, name);
}
throw new InvalidMacroDefinitionException(name.getCharImage());
throw new InvalidMacroDefinitionException(name.getCharImage(), name.getOffset(), name.getEndOffset());
}
fNameToken= name;
return name;
@ -178,12 +174,12 @@ class MacroDefinitionParser {
}
// no break;
default:
throw new InvalidMacroDefinitionException(name.getCharImage());
throw new InvalidMacroDefinitionException(name.getCharImage(), name.getOffset(), param.getEndOffset());
}
}
while (fHasVarArgs==0 && next.getType() == IToken.tCOMMA);
if (next.getType() != IToken.tRPAREN) {
throw new InvalidMacroDefinitionException(name.getCharImage());
throw new InvalidMacroDefinitionException(name.getCharImage(), name.getOffset(), next.getEndOffset());
}
next= lex.nextToken(); // consume the closing parenthesis

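The InvalidMacroDefinitionException now carries the start and end offset of the offending token, so a caller can point a diagnostic at the exact source range instead of only naming the macro. A minimal sketch of that pattern with simplified stand-in types (the reportProblem helper is hypothetical, not CDT's problem-reporting API):

// Sketch only: simplified stand-ins, not the CDT classes.
class InvalidMacroDefinitionException extends Exception {
    final char[] fName;
    final int fStartOffset;
    final int fEndOffset;

    InvalidMacroDefinitionException(char[] name, int startOffset, int endOffset) {
        fName = name;
        fStartOffset = startOffset;
        fEndOffset = endOffset;
    }
}

public class MacroProblemDemo {
    // Hypothetical caller: turns the exception into a message with a source range.
    static String reportProblem(InvalidMacroDefinitionException e) {
        return "Invalid macro definition '" + new String(e.fName)
                + "' at [" + e.fStartOffset + ", " + e.fEndOffset + ")";
    }

    public static void main(String[] args) {
        try {
            // Pretend the parser rejected the token "FOO" spanning offsets 8..11.
            throw new InvalidMacroDefinitionException("FOO".toCharArray(), 8, 11);
        } catch (InvalidMacroDefinitionException e) {
            System.out.println(reportProblem(e));
        }
    }
}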
@ -80,23 +80,21 @@ public class MacroExpander {
return t;
}

public boolean findLParenthesis(IdentityHashMap forbidden) throws OffsetLimitReachedException {
public boolean findLParenthesis() throws OffsetLimitReachedException {
Token t= first();
while (t != null) {
switch (t.getType()) {
case CPreprocessor.tSPACE:
case CPreprocessor.tNOSPACE:
case Lexer.tNEWLINE:
break;
case CPreprocessor.tSCOPE_MARKER:
((ExpansionBoundary) t).execute(forbidden);
break;
case IToken.tLPAREN:
return true;
default:
return false;
}
removeFirst();
t= (Token) t.getNext();
t= first();
}

if (fUseCpp) {
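findLParenthesis no longer consumes tokens or executes scope markers; it only looks ahead, skipping whitespace-like markers, to decide whether a function-style macro name is really followed by '('. A minimal sketch of that look-ahead over simplified token kinds (an illustration, not the CDT implementation):

import java.util.List;

public class LParenLookaheadDemo {
    // Simplified token kinds standing in for the preprocessor's internal types.
    enum Kind { SPACE, NOSPACE, NEWLINE, SCOPE_MARKER, LPAREN, IDENT, OTHER }

    // Peek past space/newline/scope markers; report whether the next "real" token is '('.
    static boolean findLParenthesis(List<Kind> pending) {
        for (Kind k : pending) {
            switch (k) {
                case SPACE:
                case NOSPACE:
                case NEWLINE:
                case SCOPE_MARKER:
                    continue;          // ignorable, keep looking
                case LPAREN:
                    return true;       // function-style invocation follows
                default:
                    return false;      // anything else: treat the macro name as plain text
            }
        }
        return false;                  // ran out of buffered tokens
    }

    public static void main(String[] args) {
        System.out.println(findLParenthesis(List.of(Kind.SPACE, Kind.LPAREN)));  // true
        System.out.println(findLParenthesis(List.of(Kind.NEWLINE, Kind.IDENT))); // false
    }
}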
@ -146,7 +144,9 @@ public class MacroExpander {
}

/**
* Expects that the identifier of the macro expansion has been consumed.
* Expects that the identifier of the macro expansion has been consumed. Expands the macro consuming
* tokens from the input (to read the parameters) and stores the resulting tokens together
* with boundary markers in the result token list.
* Returns the last token of the expansion.
*/
private Token expandOne(Token lastConsumed, PreprocessorMacro macro, IdentityHashMap forbidden, TokenSource input, TokenList result)
@ -167,7 +167,7 @@ public class MacroExpander {
replaceArgs(macro, clonedArgs, expandedArgs, result);
}
else {
objStyleTokenPaste(macro, macro.getTokens(fDefinitionParser, fLexOptions), result);
objStyleTokenPaste(macro, result);
}
result.append(new ExpansionBoundary(macro, false));
return lastConsumed;
@ -180,31 +180,30 @@ public class MacroExpander {
switch(t.getType()) {
case CPreprocessor.tSCOPE_MARKER:
((ExpansionBoundary) t).execute(forbidden);
break;
t= input.removeFirst(); // don't change l
continue;
case IToken.tIDENTIFIER:
PreprocessorMacro macro= (PreprocessorMacro) fDictionary.get(t.getCharImage());
if (macro != null && !forbidden.containsKey(macro)) {
// tricky: don't mark function-style macros if you don't find the left parenthesis
final boolean isFunctionStyle= macro.isFunctionStyle();
if (macro == null || (macro.isFunctionStyle() && !input.findLParenthesis())) {
if (!isFunctionStyle || input.findLParenthesis(forbidden)) {
result.append(t);
// mstodo- image location
fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, null));
TokenList replacement= new TokenList();
if (l != null && l.hasGap(t)) {
replacement.append(space());
}
Token last= expandOne(t, macro, forbidden, input, replacement);
Token n= input.first();
if (n != null && last.hasGap(n)) {
replacement.append(space());
}
input.prepend(replacement);
t= null;
}
}
if (t != null) {
else if (forbidden.containsKey(macro)) {
t.setType(CPreprocessor.tEXPANDED_IDENTIFIER); // prevent any further expansion
result.append(t);
}
else {
// mstodo- image location
fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, null));

TokenList replacement= new TokenList();

addSpacemarker(l, t, replacement); // start expansion
Token last= expandOne(t, macro, forbidden, input, replacement);
addSpacemarker(last, input.first(), replacement); // end expansion

input.prepend(replacement);
}
break;
default:
result.append(t);
@ -215,6 +214,21 @@ public class MacroExpander {
}
}

private void addSpacemarker(Token l, Token t, TokenList target) {
if (l != null && t != null) {
final Object s1= l.fSource;
final Object s2= t.fSource;
if (s1 == s2 && s1 != null) {
if (l.getEndOffset() == t.getOffset()) {
target.append(new SimpleToken(CPreprocessor.tNOSPACE, null, 0, 0));
}
else {
target.append(new SimpleToken(CPreprocessor.tSPACE, null, 0, 0));
}
}
}
}

/**
* Expects that the identifier has been consumed.
* @param forbidden
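The new addSpacemarker helper records whether two neighbouring tokens touched in the original text: tokens from the same source whose offsets are adjacent get a tNOSPACE marker, everything else gets tSPACE. A minimal sketch of that decision with simplified stand-ins for Token and TokenList:

import java.util.ArrayList;
import java.util.List;

public class SpaceMarkerDemo {
    static final String SPACE = "tSPACE";
    static final String NOSPACE = "tNOSPACE";

    // Simplified token: just a source object and an offset range.
    record Tok(Object source, int offset, int endOffset) {}

    // Append a marker describing whether l and t were separated by whitespace in the source.
    static void addSpacemarker(Tok l, Tok t, List<String> target) {
        if (l != null && t != null && l.source() != null && l.source() == t.source()) {
            target.add(l.endOffset() == t.offset() ? NOSPACE : SPACE);
        }
    }

    public static void main(String[] args) {
        Object src = new Object();
        List<String> markers = new ArrayList<>();
        addSpacemarker(new Tok(src, 0, 3), new Tok(src, 3, 4), markers); // adjacent -> tNOSPACE
        addSpacemarker(new Tok(src, 0, 3), new Tok(src, 5, 6), markers); // gap -> tSPACE
        System.out.println(markers); // [tNOSPACE, tSPACE]
    }
}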
@ -225,17 +239,15 @@ public class MacroExpander {
final boolean hasVarargs= macro.hasVarArgs() != FunctionStyleMacro.NO_VAARGS;
final int requiredArgs= hasVarargs ? argCount-1 : argCount;
int idx= 0;
int nesting= 0;
int nesting= -1;
for (int i = 0; i < result.length; i++) {
result[i]= new TokenSource(false);
}

Token lastToken= input.fetchFirst();
assert lastToken != null && lastToken.getType() == IToken.tLPAREN;

boolean complete= false;
boolean isFirstOfArg= true;
Token space= null;
Token lastToken= null;
Token spaceMarker= null;
loop: while (true) {
Token t= input.fetchFirst();
if (t == null) {
@ -244,6 +256,7 @@ public class MacroExpander {
lastToken= t;
switch(t.getType()) {
case Lexer.tEND_OF_INPUT:
assert nesting >= 0;
if (fCompletionMode) {
throw new OffsetLimitReachedException(ORIGIN, null);
}
@ -256,10 +269,14 @@ public class MacroExpander {
continue loop;

case IToken.tLPAREN:
++nesting;
// the first one sets nesting to zero.
if (++nesting == 0) {
continue;
}
break;

case IToken.tRPAREN:
assert nesting >= 0;
if (--nesting < 0) {
complete= true;
break loop;
@ -267,10 +284,11 @@ public class MacroExpander {
break;

case IToken.tCOMMA:
assert nesting >= 0;
if (nesting == 0) {
if (idx < argCount-1) { // next argument
isFirstOfArg= true;
space= null;
spaceMarker= null;
idx++;
continue loop;
}
@ -290,17 +308,21 @@ public class MacroExpander {
continue loop;

case CPreprocessor.tSPACE:
case CPreprocessor.tNOSPACE:
if (!isFirstOfArg) {
space= t;
spaceMarker= t;
}
continue loop;

default:
assert nesting >= 0;
}
if (argCount == 0) {
break loop;
}
if (space != null) {
if (spaceMarker != null) {
result[idx].append(space);
result[idx].append(spaceMarker);
space= null;
spaceMarker= null;
}
result[idx].append(t);
isFirstOfArg= false;
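Starting nesting at -1 lets the '(' that opens the macro call bring the counter to 0 and be swallowed; inner parentheses raise it further, the matching ')' that drops it below 0 ends argument collection, and commas split arguments only at nesting 0. A minimal sketch of that bookkeeping over plain strings (an illustration, not the real TokenSource-based code):

import java.util.ArrayList;
import java.util.List;

public class ArgCollectDemo {
    // Split the argument list of a macro call, honouring nested parentheses.
    // 'tokens' starts at the '(' of the call, e.g. for M(a, f(b, c)) it is
    // "(", "a", ",", "f", "(", "b", ",", "c", ")", ")".
    static List<String> collectArgs(List<String> tokens) {
        List<String> args = new ArrayList<>();
        StringBuilder current = new StringBuilder();
        int nesting = -1;                           // the call's own '(' moves this to 0
        for (String t : tokens) {
            switch (t) {
                case "(":
                    if (++nesting == 0) continue;   // swallow the opening parenthesis of the call
                    break;
                case ")":
                    if (--nesting < 0) {            // matching ')' of the call: done
                        args.add(current.toString());
                        return args;
                    }
                    break;
                case ",":
                    if (nesting == 0) {             // top-level comma separates arguments
                        args.add(current.toString());
                        current.setLength(0);
                        continue;
                    }
                    break;
            }
            current.append(t);
        }
        return args;                                // unterminated call: return what we have
    }

    public static void main(String[] args) {
        System.out.println(collectArgs(List.of("(", "a", ",", "f", "(", "b", ",", "c", ")", ")")));
        // prints [a, f(b,c)]
    }
}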
@ -317,34 +339,37 @@ public class MacroExpander {
}

private void replaceArgs(PreprocessorMacro macro, TokenList[] args, TokenList[] expandedArgs, TokenList result) {
TokenList input= macro.getTokens(fDefinitionParser, fLexOptions);
TokenList replacement= clone(macro.getTokens(fDefinitionParser, fLexOptions));

Token l= null;
Token n;
Token pasteArg1= null;
for (Token t= input.first(); t != null; l=t, t=n) {
for (Token t= replacement.first(); t != null; l=t, t=n) {
n= (Token) t.getNext();
boolean pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;

switch(t.getType()) {
case CPreprocessor.tMACRO_PARAMETER:
if (l != null && l.hasGap(t)) {
result.append(space());
}
int idx= ((PlaceHolderToken) t).getIndex();
if (idx < args.length) { // be defensive
TokenList arg= pasteNext ? args[idx] : expandedArgs[idx];
addSpacemarker(l, t, result); // start argument replacement
pasteArg1= cloneAndAppend(arg.first(), result, pasteNext);
TokenList arg= clone(pasteNext ? args[idx] : expandedArgs[idx]);
}
if (pasteNext) {
if (n != null && t.hasGap(n)) {
pasteArg1= arg.last();
result.append(space());
if (pasteArg1 != null) {
result.appendAllButLast(arg);
addSpacemarker(result.last(), pasteArg1, result); // start token paste
}
}
else {
result.appendAll(arg);
addSpacemarker(t, n, result); // end argument replacement
}
}
break;

case IToken.tPOUND:
if (l != null && l.hasGap(t)) {
addSpacemarker(l, t, result); // start stringify
result.append(space());
}
StringBuffer buf= new StringBuffer();
buf.append('"');
if (n != null && n.getType() == CPreprocessor.tMACRO_PARAMETER) {
|
@ -361,76 +386,102 @@ public class MacroExpander {
|
||||||
final char[] image= new char[length];
|
final char[] image= new char[length];
|
||||||
buf.getChars(0, length, image, 0);
|
buf.getChars(0, length, image, 0);
|
||||||
|
|
||||||
pasteArg1= appendToResult(new ImageToken(IToken.tSTRING, null, 0, 0, image), result, pasteNext);
|
Token generated= new ImageToken(IToken.tSTRING, null, 0, 0, image);
|
||||||
if (!pasteNext && n != null && t.hasGap(n)) {
|
if (pasteNext) { // start token paste, same as start stringify
|
||||||
result.append(space());
|
pasteArg1= generated;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
result.append(generated);
|
||||||
|
addSpacemarker(t, n, result); // end stringify
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case IToken.tPOUNDPOUND:
|
case IToken.tPOUNDPOUND:
|
||||||
if (pasteArg1 != null) {
|
if (pasteArg1 != null) {
|
||||||
Token pasteArg2= null;
|
Token pasteArg2= null;
|
||||||
Token rest= null;
|
TokenList rest= null;
|
||||||
if (n != null) {
|
if (n != null) {
|
||||||
if (n.getType() == CPreprocessor.tMACRO_PARAMETER) {
|
if (n.getType() == CPreprocessor.tMACRO_PARAMETER) {
|
||||||
idx= ((PlaceHolderToken) n).getIndex();
|
idx= ((PlaceHolderToken) n).getIndex();
|
||||||
if (idx < args.length) { // be defensive
|
if (idx < args.length) { // be defensive
|
||||||
TokenList arg= args[idx];
|
TokenList arg= clone(args[idx]);
|
||||||
pasteArg2= arg.first();
|
pasteArg2= arg.first();
|
||||||
if (pasteArg2 != null) {
|
|
||||||
rest= (Token) pasteArg2.getNext();
|
|
||||||
}
|
|
||||||
|
|
||||||
// gcc-extension
|
// gcc-extension
|
||||||
if (idx == args.length-1 && macro.hasVarArgs() != FunctionStyleMacro.NO_VAARGS) {
|
if (idx == args.length-1 && macro.hasVarArgs() != FunctionStyleMacro.NO_VAARGS) {
|
||||||
if (pasteArg1.getType() == IToken.tCOMMA) {
|
if (pasteArg1.getType() == IToken.tCOMMA) { // no paste operation
|
||||||
if (pasteArg2 == null) {
|
if (arg.first() != null) {
|
||||||
pasteArg1= null;
|
result.append(pasteArg1);
|
||||||
}
|
rest= arg;
|
||||||
else {
|
|
||||||
pasteArg2.setNext(rest);
|
|
||||||
rest= pasteArg2;
|
|
||||||
pasteArg2= null;
|
|
||||||
}
|
}
|
||||||
|
pasteArg1= pasteArg2= null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (pasteArg2 != null) {
|
||||||
|
rest= arg;
|
||||||
|
rest.removeFirst();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
idx= -1;
|
||||||
pasteArg2= n;
|
pasteArg2= n;
|
||||||
}
|
}
|
||||||
t= n;
|
t= n;
|
||||||
n= (Token) n.getNext();
|
n= (Token) n.getNext();
|
||||||
pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
|
pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
|
||||||
}
|
|
||||||
Token tp= tokenpaste(pasteArg1, pasteArg2, macro);
|
generated= tokenpaste(pasteArg1, pasteArg2, macro);
|
||||||
if (tp != null) {
|
pasteArg1= null;
|
||||||
pasteArg1= appendToResult((Token) tp.clone(), result, pasteNext && rest == null);
|
|
||||||
}
|
if (generated != null) {
|
||||||
if (rest != null) {
|
if (pasteNext && rest == null) {
|
||||||
pasteArg1= cloneAndAppend(rest, result, pasteNext);
|
pasteArg1= generated; // no need to mark spaces, done ahead
|
||||||
}
|
}
|
||||||
if (!pasteNext && n != null && t.hasGap(n)) {
|
else {
|
||||||
result.append(space());
|
result.append(generated);
|
||||||
|
addSpacemarker(pasteArg2, rest == null ? n : rest.first(), result); // end token paste
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (rest != null) {
|
||||||
|
if (pasteNext) {
|
||||||
|
pasteArg1= rest.last();
|
||||||
|
if (pasteArg1 != null) {
|
||||||
|
result.appendAllButLast(rest);
|
||||||
|
addSpacemarker(result.last(), pasteArg1, result); // start token paste
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
result.appendAll(rest);
|
||||||
|
if (idx >= 0) {
|
||||||
|
addSpacemarker(t, n, result); // end argument replacement
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext);
|
if (pasteNext) {
|
||||||
|
addSpacemarker(l, t, result); // start token paste
|
||||||
|
pasteArg1= t;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
result.append(t);
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private SimpleToken space() {
|
private void objStyleTokenPaste(PreprocessorMacro macro, TokenList result) {
|
||||||
return new SimpleToken(CPreprocessor.tSPACE, null, 0, 0);
|
TokenList replacement= clone(macro.getTokens(fDefinitionParser, fLexOptions));
|
||||||
}
|
|
||||||
|
|
||||||
private void objStyleTokenPaste(PreprocessorMacro macro, TokenList input, TokenList result) {
|
Token l= null;
|
||||||
Token n;
|
Token n;
|
||||||
Token pasteArg1= null;
|
Token pasteArg1= null;
|
||||||
for (Token t= input.first(); t != null; t=n) {
|
for (Token t= replacement.first(); t != null; l=t, t=n) {
|
||||||
n= (Token) t.getNext();
|
n= (Token) t.getNext();
|
||||||
boolean pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
|
boolean pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
|
||||||
|
|
||||||
|
@ -446,47 +497,36 @@ public class MacroExpander {

t= tokenpaste(pasteArg1, pasteArg2, macro);
if (t != null) {
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext);
if (pasteNext) {
pasteArg1= t;
}
else {
result.append(t);
addSpacemarker(pasteArg2, n, result); // end token paste
}
}
}
break;

default:
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext);
if (pasteNext) {
addSpacemarker(l, t, result); // start token paste
pasteArg1= t;
}
else {
result.append(t);
}
break;
}
}
}

private Token appendToResult(Token t, TokenList result, boolean pasteNext) {
private TokenList clone(TokenList tl) {
if (pasteNext) {
TokenList result= new TokenList();
return t;
for (Token t= tl.first(); t != null; t= (Token) t.getNext()) {
}
result.append(t);
return null;
}

private Token cloneAndAppend(Token tokens, TokenList result, boolean pasteNext) {
Token t= tokens;
if (t == null) {
return null;
}
Token n= (Token) t.getNext();
Token p= null;
while (n != null) {
result.append((Token) t.clone());
p= t;
t= n;
n= (Token) n.getNext();
}
if (t != null && !pasteNext) {
return result;
result.append((Token) t.clone());
return null;
}
if (p != null && p.hasGap(t)) {
result.append(space());
}
return t;
}

private Token tokenpaste(Token arg1, Token arg2, PreprocessorMacro macro) {
@ -522,9 +562,10 @@ public class MacroExpander {
Token l= null;
Token n;
boolean space= false;
for (; t != null; l=t, t= n) {
for (; t != null; l=t, t=n) {
n= (Token) t.getNext();
if (!space && l != null && l.hasGap(t)) {
if (!space && l != null && l.fSource != null && l.fSource == t.fSource &&
l.getEndOffset() != t.getOffset()) {
buf.append(' ');
space= true;
}
@ -551,6 +592,9 @@ public class MacroExpander {
}
break;

case CPreprocessor.tNOSPACE:
break;

default:
buf.append(t.getCharImage());
space= false;
@ -581,6 +625,7 @@ public class MacroExpander {
break;
case CPreprocessor.tSCOPE_MARKER:
case CPreprocessor.tSPACE:
case CPreprocessor.tNOSPACE:
replacement.removeBehind(l);
continue;
}
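When a token run is rendered back to text (for stringification and similar), at most one blank is inserted between tokens, and only where the original source had a gap; tNOSPACE markers contribute nothing to the image. A minimal sketch of that image-building loop over simplified tokens (not the actual MacroExpander code):

import java.util.List;

public class TokenImageDemo {
    // Simplified token: its text plus the offsets it occupied in one common source.
    record Tok(String image, int offset, int endOffset) {}

    // Concatenate token images, inserting a single space only where the source had a gap.
    static String buildImage(List<Tok> tokens) {
        StringBuilder buf = new StringBuilder();
        Tok last = null;
        for (Tok t : tokens) {
            if (last != null && last.endOffset() != t.offset()) {
                buf.append(' ');                 // there was whitespace between them originally
            }
            buf.append(t.image());
            last = t;
        }
        return buf.toString();
    }

    public static void main(String[] args) {
        // "a  +b" in the source: gap between 'a' and '+', none between '+' and 'b'.
        System.out.println(buildImage(List.of(
                new Tok("a", 0, 1), new Tok("+", 3, 4), new Tok("b", 4, 5)))); // prints "a +b"
    }
}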
@ -127,6 +127,14 @@ class ObjectStyleMacro extends PreprocessorMacro {
}
}

public int getExpansionOffset() {
return fExpansionOffset;
}

public int getExpansionEndOffset() {
return fEndOffset;
}

private void setSource(Token t) {
while (t != null) {
t.fSource= this;
@ -26,7 +26,7 @@ class TokenList {
}
}

final public void append(Token t) {
public final void append(Token t) {
if (fFirst == null) {
fFirst= fLast= t;
}
@ -37,7 +37,30 @@ class TokenList {
t.setNext(null);
}

final public void prepend(TokenList prepend) {
public final void appendAll(TokenList tl) {
final Token t= tl.first();
if (t != null) {
if (fFirst == null) {
fFirst= tl.fFirst;
}
else {
fLast.setNext(tl.fFirst);
}
fLast= tl.fLast;
}
tl.fFirst= tl.fLast= null;
}

public final void appendAllButLast(TokenList tl) {
Token t= tl.first();
if (t != null) {
for (Token n= (Token) t.getNext(); n != null; t=n, n= (Token) n.getNext()) {
append(t);
}
}
}

public final void prepend(TokenList prepend) {
final Token first= prepend.fFirst;
if (first != null) {
final Token last= prepend.fLast;
@ -49,7 +72,7 @@ class TokenList {
}
}

final public TokenList cloneTokens() {
public final TokenList cloneTokens() {
TokenList result= new TokenList();
for (Token t= fFirst; t != null; t= (Token) t.getNext()) {
if (t.getType() != CPreprocessor.tSCOPE_MARKER) {
@ -59,10 +82,14 @@
return result;
}

final public Token first() {
public final Token first() {
return fFirst;
}

public final Token last() {
return fLast;
}

final void removeBehind(Token l) {
if (l == null) {
Token t= fFirst;
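appendAll splices another list's tokens onto this one in constant time and empties the source list; appendAllButLast re-links every token except the last, leaving the final token available to the caller as a paste operand. A minimal sketch of both operations on a simple singly-linked list (simplified stand-ins, not the CDT TokenList):

import java.util.ArrayList;
import java.util.List;

public class TokenListDemo {
    static class Node { final String image; Node next; Node(String image) { this.image = image; } }

    static class SimpleTokenList {
        Node first, last;

        void append(Node t) {
            if (first == null) { first = last = t; } else { last.next = t; last = t; }
            t.next = null;
        }

        // Splice all of tl onto this list and leave tl empty.
        void appendAll(SimpleTokenList tl) {
            if (tl.first != null) {
                if (first == null) { first = tl.first; } else { last.next = tl.first; }
                last = tl.last;
            }
            tl.first = tl.last = null;
        }

        // Re-link every token except the last one (the last stays available to the caller).
        void appendAllButLast(SimpleTokenList tl) {
            Node t = tl.first;
            if (t != null) {
                for (Node n = t.next; n != null; t = n, n = n.next) {
                    append(t);
                }
            }
        }

        List<String> images() {
            List<String> out = new ArrayList<>();
            for (Node t = first; t != null; t = t.next) out.add(t.image);
            return out;
        }
    }

    public static void main(String[] args) {
        SimpleTokenList a = new SimpleTokenList(), b = new SimpleTokenList(), c = new SimpleTokenList();
        b.append(new Node("x")); b.append(new Node("y")); b.append(new Node("z"));
        a.appendAllButLast(b);          // re-links x and y into a; z stays reachable as b.last
        c.append(new Node("("));
        c.appendAll(a);                 // splices a onto c and empties a
        System.out.println(c.images()); // [(, x, y]
    }
}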
@ -124,6 +124,7 @@ public class TokenUtil {
case IGCCToken.tMAX: return Keywords.cpMAX;

case CPreprocessor.tSPACE: return SPACE;
case CPreprocessor.tNOSPACE: return CharArrayUtils.EMPTY;

default:
return CharArrayUtils.EMPTY;