
Implementation of macro-expansion.

Markus Schorn 2007-10-29 16:33:15 +00:00
parent 1a138c44af
commit 1284a72c42
33 changed files with 3765 additions and 956 deletions


@ -200,25 +200,25 @@ public class LexerTests extends BaseTestCase {
public void testLineSplicingHeaderName() throws Exception { public void testLineSplicingHeaderName() throws Exception {
init("p\"a\\\nb\""); init("p\"a\\\nb\"");
fLexer.setInsideIncludeDirective(); fLexer.setInsideIncludeDirective(true);
id("p"); id("p");
token(Lexer.tQUOTE_HEADER_NAME, "\"ab\""); token(Lexer.tQUOTE_HEADER_NAME, "\"ab\"");
eof(); eof();
init("p\"a\\\r\nb\""); init("p\"a\\\r\nb\"");
fLexer.setInsideIncludeDirective(); fLexer.setInsideIncludeDirective(true);
id("p"); id("p");
token(Lexer.tQUOTE_HEADER_NAME, "\"ab\""); token(Lexer.tQUOTE_HEADER_NAME, "\"ab\"");
eof(); eof();
init("p<a\\\nb>"); init("p<a\\\nb>");
fLexer.setInsideIncludeDirective(); fLexer.setInsideIncludeDirective(true);
id("p"); id("p");
token(Lexer.tSYSTEM_HEADER_NAME, "<ab>"); token(Lexer.tSYSTEM_HEADER_NAME, "<ab>");
eof(); eof();
init("p<a\\\r\nb>"); init("p<a\\\r\nb>");
fLexer.setInsideIncludeDirective(); fLexer.setInsideIncludeDirective(true);
id("p"); id("p");
token(Lexer.tSYSTEM_HEADER_NAME, "<ab>"); token(Lexer.tSYSTEM_HEADER_NAME, "<ab>");
eof(); eof();
@ -280,13 +280,13 @@ public class LexerTests extends BaseTestCase {
public void testHeaderName() throws Exception { public void testHeaderName() throws Exception {
init("p\"'/*//\\\""); init("p\"'/*//\\\"");
fLexer.setInsideIncludeDirective(); fLexer.setInsideIncludeDirective(true);
id("p"); id("p");
token(Lexer.tQUOTE_HEADER_NAME, "\"'/*//\\\""); token(Lexer.tQUOTE_HEADER_NAME, "\"'/*//\\\"");
eof(); eof();
init("p<'\"/*//>"); init("p<'\"/*//>");
fLexer.setInsideIncludeDirective(); fLexer.setInsideIncludeDirective(true);
id("p"); id("p");
token(Lexer.tSYSTEM_HEADER_NAME, "<'\"/*//>"); token(Lexer.tSYSTEM_HEADER_NAME, "<'\"/*//>");
eof(); eof();
@ -316,8 +316,7 @@ public class LexerTests extends BaseTestCase {
init(ident, false, true); init(ident, false, true);
final int idxDollar = ident.indexOf('$'); final int idxDollar = ident.indexOf('$');
id(ident.substring(0, idxDollar)); id(ident.substring(0, idxDollar));
problem(IProblem.SCANNER_BAD_CHARACTER, "$"); token(IToken.tOTHER_CHARACTER, "$");
ws();
id(ident.substring(idxDollar+1)); id(ident.substring(idxDollar+1));
} }
@ -437,7 +436,7 @@ public class LexerTests extends BaseTestCase {
IToken.tBITORASSIGN, IToken.tSHIFTL, IToken.tSHIFTR, IToken.tSHIFTLASSIGN, IToken.tBITORASSIGN, IToken.tSHIFTL, IToken.tSHIFTR, IToken.tSHIFTLASSIGN,
IToken.tSHIFTRASSIGN, IToken.tEQUAL, IToken.tNOTEQUAL, IToken.tLTEQUAL, IToken.tGTEQUAL, IToken.tSHIFTRASSIGN, IToken.tEQUAL, IToken.tNOTEQUAL, IToken.tLTEQUAL, IToken.tGTEQUAL,
IToken.tAND, IToken.tOR, IToken.tINCR, IToken.tDECR, IToken.tCOMMA, IToken.tARROWSTAR, IToken.tAND, IToken.tOR, IToken.tINCR, IToken.tDECR, IToken.tCOMMA, IToken.tARROWSTAR,
IToken.tARROW, IGCCToken.tMIN, IGCCToken.tMAX, IToken.tBACKSLASH, IToken.tARROW, IGCCToken.tMIN, IGCCToken.tMAX, IToken.tOTHER_CHARACTER,
}; };
for (int splices=0; splices<9; splices++) { for (int splices=0; splices<9; splices++) {


@ -466,38 +466,38 @@ public class LocationMapTests extends BaseTestCase {
public void testContexts() { public void testContexts() {
init(DIGITS); init(DIGITS);
assertEquals(FN, fLocationMap.getTranslationUnitPath()); assertEquals(FN, fLocationMap.getTranslationUnitPath());
assertEquals(FN, fLocationMap.getCurrentFilename()); assertEquals(FN, fLocationMap.getCurrentFilePath());
// number: [30,36)[46,50) // number: [30,36)[46,50)
ILocationCtx pre1= fLocationMap.pushPreInclusion("0102030405".toCharArray(), 0, false); ILocationCtx pre1= fLocationMap.pushPreInclusion("0102030405".toCharArray(), 0, false);
assertEquals(FN, fLocationMap.getCurrentFilename()); assertEquals(FN, fLocationMap.getCurrentFilePath());
// number: [0,6)[26,30) // number: [0,6)[26,30)
ILocationCtx pre2= fLocationMap.pushPreInclusion("a1a2a3a4a5".toCharArray(), 0, true); ILocationCtx pre2= fLocationMap.pushPreInclusion("a1a2a3a4a5".toCharArray(), 0, true);
assertEquals(FN, fLocationMap.getCurrentFilename()); assertEquals(FN, fLocationMap.getCurrentFilePath());
fLocationMap.encounteredComment(0,2,true); fLocationMap.encounteredComment(0,2,true);
// number: [6,15)[25,26) // number: [6,15)[25,26)
ILocationCtx i1= fLocationMap.pushInclusion(0, 2, 4, 6, "b1b2b3b4b5".toCharArray(), "pre1", "pre1".toCharArray(), false); ILocationCtx i1= fLocationMap.pushInclusion(0, 2, 4, 6, "b1b2b3b4b5".toCharArray(), "pre1", "pre1".toCharArray(), false);
assertEquals("pre1", fLocationMap.getCurrentFilename()); assertEquals("pre1", fLocationMap.getCurrentFilePath());
fLocationMap.encounteredComment(2,4,true); fLocationMap.encounteredComment(2,4,true);
// number: [15,25) // number: [15,25)
ILocationCtx i2= fLocationMap.pushInclusion(6, 7, 8, 9, "c1c2c3c4c5".toCharArray(), "pre11", "pre11".toCharArray(), false); ILocationCtx i2= fLocationMap.pushInclusion(6, 7, 8, 9, "c1c2c3c4c5".toCharArray(), "pre11", "pre11".toCharArray(), false);
assertEquals("pre11", fLocationMap.getCurrentFilename()); assertEquals("pre11", fLocationMap.getCurrentFilePath());
fLocationMap.encounteredComment(2,6,true); fLocationMap.encounteredComment(2,6,true);
fLocationMap.popContext(i2); fLocationMap.popContext(i2);
// add a comment before the include // add a comment before the include
fLocationMap.encounteredComment(4,6,false); fLocationMap.encounteredComment(4,6,false);
assertEquals("pre1", fLocationMap.getCurrentFilename()); assertEquals("pre1", fLocationMap.getCurrentFilePath());
fLocationMap.popContext(i1); fLocationMap.popContext(i1);
assertEquals(FN, fLocationMap.getCurrentFilename()); assertEquals(FN, fLocationMap.getCurrentFilePath());
fLocationMap.popContext(pre2); fLocationMap.popContext(pre2);
assertEquals(FN, fLocationMap.getCurrentFilename()); assertEquals(FN, fLocationMap.getCurrentFilePath());
// number [36, 46) // number [36, 46)
ILocationCtx i3= fLocationMap.pushInclusion(0, 2, 4, 6, "d1d2d3d4d5".toCharArray(), "pre2", "pre2".toCharArray(), false); ILocationCtx i3= fLocationMap.pushInclusion(0, 2, 4, 6, "d1d2d3d4d5".toCharArray(), "pre2", "pre2".toCharArray(), false);
assertEquals("pre2", fLocationMap.getCurrentFilename()); assertEquals("pre2", fLocationMap.getCurrentFilePath());
fLocationMap.encounteredComment(0,2,true); fLocationMap.encounteredComment(0,2,true);
fLocationMap.popContext(i3); fLocationMap.popContext(i3);
fLocationMap.popContext(pre1); fLocationMap.popContext(pre1);
assertEquals(FN, fLocationMap.getCurrentFilename()); assertEquals(FN, fLocationMap.getCurrentFilePath());
IASTComment[] comments= fLocationMap.getComments(); IASTComment[] comments= fLocationMap.getComments();


@ -6,24 +6,20 @@
* http://www.eclipse.org/legal/epl-v10.html * http://www.eclipse.org/legal/epl-v10.html
* *
* Contributors: * Contributors:
* IBM - Initial API and implementation
* Markus Schorn (Wind River Systems) * Markus Schorn (Wind River Systems)
* Emanuel Graf (IFS)
*******************************************************************************/ *******************************************************************************/
package org.eclipse.cdt.core.parser.tests.scanner; package org.eclipse.cdt.core.parser.tests.scanner;
import junit.framework.Test; import junit.framework.Test;
import junit.framework.TestSuite; import junit.framework.TestSuite;
/**
* @author jcamelon
*/
public class ScannerTestSuite extends TestSuite { public class ScannerTestSuite extends TestSuite {
public static Test suite() { public static Test suite() {
TestSuite suite= new ScannerTestSuite(); TestSuite suite= new ScannerTestSuite();
suite.addTest(LexerTests.suite()); suite.addTest(LexerTests.suite());
suite.addTest(LocationMapTests.suite()); suite.addTest(LocationMapTests.suite());
suite.addTest(PortedScannerTest.suite());
return suite; return suite;
} }
} }


@ -66,8 +66,11 @@ final class TestMacro implements IMacroBinding {
return result; return result;
} }
public String getFilename() { public char[] getExpansionImage() {
// mstodo Auto-generated method stub return getExpansion();
return null; }
public char[][] getParameterPlaceholderList() {
return getParameterList();
} }
} }


@ -16,12 +16,6 @@ package org.eclipse.cdt.core.dom.ast;
* <p> This interface is not intended to be implemented by clients. </p> * <p> This interface is not intended to be implemented by clients. </p>
*/ */
public interface IMacroBinding extends IBinding { public interface IMacroBinding extends IBinding {
/**
* Returns the expansion of this macro definition.
* @since 5.0
*/
char[] getExpansion();
/** /**
* Returns <code>true</code> if this is a function-style macro. * Returns <code>true</code> if this is a function-style macro.
@ -33,4 +27,25 @@ public interface IMacroBinding extends IBinding {
* Returns the parameter names or <code>null</code> if this is not a function style macro. * Returns the parameter names or <code>null</code> if this is not a function style macro.
*/ */
char[][] getParameterList(); char[][] getParameterList();
/**
* Returns the expansion of this macro definition.
* @since 5.0
*/
char[] getExpansion();
/**
* Returns the parameter list where the name of the last parameter is changed if this is a variadic macro,
* or <code>null</code> if this is not a function style macro.
* The parameter '...' will be changed to '__VA_ARGS__'
* Parameters like 'a...' will be changed to 'a'.
* @since 5.0
*/
char[][] getParameterPlaceholderList();
/**
* Returns the image of the expansion (also containing comments).
* @since 5.0
*/
char[] getExpansionImage();
} }
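The renaming rule documented for getParameterPlaceholderList() above can be made concrete with a small, self-contained sketch. This is illustrative code only, not part of the CDT sources; the helper name and the String-based representation are invented for the example.

public class PlaceholderRuleSketch {
	// Mirrors the documented rule: '...' becomes '__VA_ARGS__', a trailing '...' on a named parameter is dropped.
	static String[] toPlaceholders(String[] parameterList) {
		String[] placeholders = new String[parameterList.length];
		for (int i = 0; i < parameterList.length; i++) {
			String p = parameterList[i];
			if (p.equals("...")) {
				placeholders[i] = "__VA_ARGS__";
			} else if (p.endsWith("...")) {
				placeholders[i] = p.substring(0, p.length() - 3);
			} else {
				placeholders[i] = p;
			}
		}
		return placeholders;
	}

	public static void main(String[] args) {
		// #define M(x, ...)     -> placeholder list [x, __VA_ARGS__]
		System.out.println(java.util.Arrays.toString(toPlaceholders(new String[] { "x", "..." })));
		// #define N(x, rest...) -> placeholder list [x, rest]
		System.out.println(java.util.Arrays.toString(toPlaceholders(new String[] { "x", "rest..." })));
	}
}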


@ -21,24 +21,44 @@ public interface IToken {
public int getType(); public int getType();
public String getImage(); public String getImage();
public char [] getCharImage(); public char [] getCharImage();
public char [] getFilename();
public int getOffset(); public int getOffset();
public int getLength(); public int getLength();
public int getEndOffset(); public int getEndOffset();
// NOTE:if the token spans lines due to escaped newlines then
// the line number returned is the last one
public int getLineNumber();
public IToken getNext(); public IToken getNext();
public void setNext(IToken t); public void setNext(IToken t);
public void setType(int i); public void setType(int i);
// queries
public boolean looksLikeExpression();
public boolean isPointer();
public boolean isOperator(); public boolean isOperator();
public boolean canBeAPrefix();
/**
* @deprecated semantics is unclear, depends on the parser.
* will be removed in 5.0
*/
public boolean isPointer();
/**
* @deprecated semantics is unclear, depends on the parser.
* will be removed in 5.0
*/
public boolean canBeAPrefix();
/**
* @deprecated semantics is unclear, depends on the parser.
* will be removed in 5.0
*/
public boolean looksLikeExpression();
/**
* @deprecated will be removed in 5.0
*/
public int getLineNumber();
/**
* @deprecated will be removed in 5.0
*/
public char [] getFilename();
// Token types // Token types
int FIRST_RESERVED_PREPROCESSOR= -200; int FIRST_RESERVED_PREPROCESSOR= -200;
@ -84,6 +104,7 @@ public interface IToken {
static public final int tNOT = 36; static public final int tNOT = 36;
static public final int tEQUAL = 37; static public final int tEQUAL = 37;
static public final int tASSIGN = 38; static public final int tASSIGN = 38;
static public final int tUNKNOWN_CHAR= 39;
static public final int tSHIFTL = 40; static public final int tSHIFTL = 40;
static public final int tLTEQUAL = 41; static public final int tLTEQUAL = 41;
static public final int tLT = 42; static public final int tLT = 42;
@ -97,7 +118,7 @@ public interface IToken {
static public final int tDOT = 50; static public final int tDOT = 50;
static public final int tDIVASSIGN = 51; static public final int tDIVASSIGN = 51;
static public final int tDIV = 52; static public final int tDIV = 52;
static public final int tBACKSLASH= 53; static public final int tOTHER_CHARACTER= 53;
/** @deprecated use {@link #tAND} */ /** @deprecated use {@link #tAND} */
static public final int t_and = 54; static public final int t_and = 54;


@ -8,6 +8,7 @@
* Contributors: * Contributors:
* IBM Rational Software - Initial API and implementation * IBM Rational Software - Initial API and implementation
* Anton Leherbauer (Wind River Systems) * Anton Leherbauer (Wind River Systems)
* Markus Schorn (Wind River Systems)
*******************************************************************************/ *******************************************************************************/
package org.eclipse.cdt.core.parser; package org.eclipse.cdt.core.parser;
@ -233,7 +234,6 @@ public class Keywords {
public static final char[] cpDIV = "/".toCharArray(); //$NON-NLS-1$ public static final char[] cpDIV = "/".toCharArray(); //$NON-NLS-1$
public static final char[] cpPOUND = "#".toCharArray(); //$NON-NLS-1$ public static final char[] cpPOUND = "#".toCharArray(); //$NON-NLS-1$
public static final char[] cpPOUNDPOUND = "##".toCharArray(); //$NON-NLS-1$ public static final char[] cpPOUNDPOUND = "##".toCharArray(); //$NON-NLS-1$
public static final char[] cpBACKSLASH = "\\".toCharArray(); //$NON-NLS-1$
// gcc extensions // gcc extensions
public static final char[] cpMIN = "<?".toCharArray(); //$NON-NLS-1$ public static final char[] cpMIN = "<?".toCharArray(); //$NON-NLS-1$
@ -366,7 +366,6 @@ public class Keywords {
public static void addKeywordsPreprocessor(CharArrayIntMap ppKeywords) { public static void addKeywordsPreprocessor(CharArrayIntMap ppKeywords) {
// Preprocessor keywords // Preprocessor keywords
ppKeywords = new CharArrayIntMap(16, IPreprocessorDirective.ppInvalid);
ppKeywords.put(Keywords.cIF, IPreprocessorDirective.ppIf); ppKeywords.put(Keywords.cIF, IPreprocessorDirective.ppIf);
ppKeywords.put(Keywords.cIFDEF, IPreprocessorDirective.ppIfdef); ppKeywords.put(Keywords.cIFDEF, IPreprocessorDirective.ppIfdef);
ppKeywords.put(Keywords.cIFNDEF, IPreprocessorDirective.ppIfndef); ppKeywords.put(Keywords.cIFNDEF, IPreprocessorDirective.ppIfndef);
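The removed line above re-assigned the ppKeywords parameter to a fresh map, so every subsequent put populated a throwaway object instead of the map supplied by the caller; dropping the re-assignment makes addKeywordsPreprocessor fill the caller's map. A minimal, hypothetical illustration of that Java pitfall (plain JDK types, not CDT code):

import java.util.HashMap;
import java.util.Map;

public class ParameterReassignmentDemo {
	static void fillBroken(Map<String, Integer> map) {
		map = new HashMap<String, Integer>(); // rebinds only the local reference
		map.put("if", 1);                     // the caller's map never sees this entry
	}

	static void fillFixed(Map<String, Integer> map) {
		map.put("if", 1);                     // mutates the object the caller passed in
	}

	public static void main(String[] args) {
		Map<String, Integer> m = new HashMap<String, Integer>();
		fillBroken(m);
		System.out.println(m.size()); // 0
		fillFixed(m);
		System.out.println(m.size()); // 1
	}
}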


@ -1,5 +1,5 @@
/******************************************************************************* /*******************************************************************************
* Copyright (c) 2004, 2006 IBM Corporation and others. * Copyright (c) 2004, 2007 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials * All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0 * are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at * which accompanies this distribution, and is available at
@ -7,6 +7,7 @@
* *
* Contributors: * Contributors:
* IBM Corporation - initial API and implementation * IBM Corporation - initial API and implementation
* Markus Schorn (Wind River Systems)
*******************************************************************************/ *******************************************************************************/
package org.eclipse.cdt.core.parser.util; package org.eclipse.cdt.core.parser.util;
@ -27,7 +28,7 @@ public class CharArrayIntMap extends CharTable {
protected void resize(int size) { protected void resize(int size) {
int[] oldValueTable = valueTable; int[] oldValueTable = valueTable;
valueTable = new int[size]; valueTable = new int[size];
System.arraycopy(oldValueTable, 0, valueTable, 0, oldValueTable.length); System.arraycopy(oldValueTable, 0, valueTable, 0, Math.min(size, oldValueTable.length));
super.resize(size); super.resize(size);
} }


@ -1,5 +1,5 @@
/******************************************************************************* /*******************************************************************************
* Copyright (c) 2004, 2006 IBM Corporation and others. * Copyright (c) 2004, 2007 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials * All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0 * are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at * which accompanies this distribution, and is available at
@ -7,6 +7,7 @@
* *
* Contributors: * Contributors:
* IBM Corporation - initial API and implementation * IBM Corporation - initial API and implementation
* Markus Schorn (Wind River Systems)
*******************************************************************************/ *******************************************************************************/
package org.eclipse.cdt.core.parser.util; package org.eclipse.cdt.core.parser.util;
@ -29,7 +30,7 @@ public class CharTable extends HashTable {
protected void resize(int size) { protected void resize(int size) {
char[][] oldKeyTable = keyTable; char[][] oldKeyTable = keyTable;
keyTable = new char[size][]; keyTable = new char[size][];
System.arraycopy(oldKeyTable, 0, keyTable, 0, oldKeyTable.length); System.arraycopy(oldKeyTable, 0, keyTable, 0, Math.min(size, oldKeyTable.length));
super.resize(size); super.resize(size);
} }
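In both resize() methods above, the new Math.min clamp allows the tables to shrink as well as grow: System.arraycopy raises an IndexOutOfBoundsException when asked to copy more elements than the destination array can hold. A tiny, stand-alone illustration (not CDT code):

public class ResizeClampDemo {
	public static void main(String[] args) {
		int[] oldTable = { 1, 2, 3, 4 };
		int newSize = 2;
		int[] newTable = new int[newSize];
		// Unclamped copy would throw: System.arraycopy(oldTable, 0, newTable, 0, oldTable.length);
		System.arraycopy(oldTable, 0, newTable, 0, Math.min(newSize, oldTable.length));
		System.out.println(java.util.Arrays.toString(newTable)); // [1, 2]
	}
}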


@ -365,7 +365,7 @@ public class GNUCPPSourceParser extends AbstractGNUSourceCodeParser {
last = consume(); last = consume();
} }
if (LT(1) == IToken.tCOMPL) if (LT(1) == IToken.tBITCOMPLEMENT)
consume(); consume();
switch (LT(1)) { switch (LT(1)) {
@ -393,7 +393,7 @@ public class GNUCPPSourceParser extends AbstractGNUSourceCodeParser {
if (LT(1) == IToken.t_template) if (LT(1) == IToken.t_template)
consume(); consume();
if (LT(1) == IToken.tCOMPL) if (LT(1) == IToken.tBITCOMPLEMENT)
consume(); consume();
switch (LT(1)) { switch (LT(1)) {
@ -1373,7 +1373,7 @@ public class GNUCPPSourceParser extends AbstractGNUSourceCodeParser {
return unaryOperatorCastExpression(IASTUnaryExpression.op_minus);// IASTExpression.Kind.UNARY_MINUS_CASTEXPRESSION); return unaryOperatorCastExpression(IASTUnaryExpression.op_minus);// IASTExpression.Kind.UNARY_MINUS_CASTEXPRESSION);
case IToken.tNOT: case IToken.tNOT:
return unaryOperatorCastExpression(IASTUnaryExpression.op_not);// IASTExpression.Kind.UNARY_NOT_CASTEXPRESSION); return unaryOperatorCastExpression(IASTUnaryExpression.op_not);// IASTExpression.Kind.UNARY_NOT_CASTEXPRESSION);
case IToken.tCOMPL: case IToken.tBITCOMPLEMENT:
return unaryOperatorCastExpression(IASTUnaryExpression.op_tilde);// IASTExpression.Kind.UNARY_TILDE_CASTEXPRESSION); return unaryOperatorCastExpression(IASTUnaryExpression.op_tilde);// IASTExpression.Kind.UNARY_TILDE_CASTEXPRESSION);
case IToken.tINCR: case IToken.tINCR:
return unaryOperatorCastExpression(IASTUnaryExpression.op_prefixIncr);// IASTExpression.Kind.UNARY_INCREMENT); return unaryOperatorCastExpression(IASTUnaryExpression.op_prefixIncr);// IASTExpression.Kind.UNARY_INCREMENT);
@ -1864,7 +1864,7 @@ public class GNUCPPSourceParser extends AbstractGNUSourceCodeParser {
case IToken.tCOLONCOLON: case IToken.tCOLONCOLON:
case IToken.t_operator: case IToken.t_operator:
case IToken.tCOMPLETION: case IToken.tCOMPLETION:
case IToken.tCOMPL: { case IToken.tBITCOMPLEMENT: {
IASTName name = idExpression(); IASTName name = idExpression();
IASTIdExpression idExpression = createIdExpression(); IASTIdExpression idExpression = createIdExpression();
((ASTNode) idExpression).setOffsetAndLength(((ASTNode) name) ((ASTNode) idExpression).setOffsetAndLength(((ASTNode) name)
@ -3987,7 +3987,7 @@ public class GNUCPPSourceParser extends AbstractGNUSourceCodeParser {
break overallLoop; break overallLoop;
if (!LA(2).looksLikeExpression()&& !forNewTypeId) { if (!looksLikeExpression(LA(2))&& !forNewTypeId) {
// parameterDeclarationClause // parameterDeclarationClause
isFunction = true; isFunction = true;
// TODO need to create a temporary scope object here // TODO need to create a temporary scope object here
@ -4224,7 +4224,31 @@ public class GNUCPPSourceParser extends AbstractGNUSourceCodeParser {
} }
/** private boolean looksLikeExpression(IToken t) {
switch (t.getType()) {
case IToken.tINTEGER:
case IToken.t_false:
case IToken.t_true:
case IToken.tSTRING:
case IToken.tLSTRING:
case IToken.tFLOATINGPT:
case IToken.tCHAR:
case IToken.tAMPER:
case IToken.tDOT:
case IToken.tLPAREN:
case IToken.tMINUS:
case IToken.tSTAR:
case IToken.tPLUS:
case IToken.tNOT:
case IToken.tBITCOMPLEMENT:
return true;
default:
break;
}
return false;
}
/**
* @return * @return
*/ */
protected IASTProblemTypeId createTypeIDProblem() { protected IASTProblemTypeId createTypeIDProblem() {


@ -136,5 +136,5 @@ class ASTMacroReferenceName extends ASTPreprocessorName {
return true; return true;
} }
// mstodo once names support image-locations, return correct ones here. // mstodo- image-locations.
} }


@ -377,7 +377,7 @@ class DependencyTree extends ASTInclusionNode implements IDependencyTree {
} }
public String getTranslationUnitPath() { public String getTranslationUnitPath() {
return fLocationCtx.getFilename(); return fLocationCtx.getFilePath();
} }
} }
@ -393,7 +393,7 @@ class ASTFileLocation implements IASTFileLocation {
} }
public String getFileName() { public String getFileName() {
return fLocationCtx.getFilename(); return fLocationCtx.getFilePath();
} }
public IASTFileLocation asFileLocation() { public IASTFileLocation asFileLocation() {


@ -19,7 +19,6 @@ import org.eclipse.cdt.core.parser.util.CharArrayObjectMap;
/** /**
* Used to evaluate expressions in preprocessor directives. * Used to evaluate expressions in preprocessor directives.
* @since 5.0
*/ */
class ExpressionEvaluator { class ExpressionEvaluator {
static class EvalException extends Exception { static class EvalException extends Exception {
@ -28,6 +27,7 @@ class ExpressionEvaluator {
public EvalException(int problemID, char[] problemArg) { public EvalException(int problemID, char[] problemArg) {
fProblemID= problemID; fProblemID= problemID;
fProblemArg= problemArg;
} }
public int getProblemID() { public int getProblemID() {
@ -42,8 +42,8 @@ class ExpressionEvaluator {
private Token fTokens; private Token fTokens;
private CharArrayObjectMap fDictionary; private CharArrayObjectMap fDictionary;
public boolean evaluate(Token condition, CharArrayObjectMap dictionary) throws EvalException { public boolean evaluate(TokenList condition, CharArrayObjectMap dictionary) throws EvalException {
fTokens= condition; fTokens= condition.first();
fDictionary= dictionary; fDictionary= dictionary;
return expression() != 0; return expression() != 0;
} }
@ -136,7 +136,7 @@ class ExpressionEvaluator {
private long relationalExpression() throws EvalException { private long relationalExpression() throws EvalException {
long r1 = shiftExpression(); long r1 = shiftExpression();
for (int t = LA(); t == IToken.tLT || t == IToken.tLTEQUAL || t == IToken.tGT for (int t = LA(); t == IToken.tLT || t == IToken.tLTEQUAL || t == IToken.tGT
|| t == IToken.tGTEQUAL; t = LA()) { || t == IToken.tGTEQUAL || t == IToken.tASSIGN; t = LA()) {
consume(); consume();
long r2 = shiftExpression(); long r2 = shiftExpression();
switch (t) { switch (t) {
@ -152,6 +152,8 @@ class ExpressionEvaluator {
case IToken.tGTEQUAL: case IToken.tGTEQUAL:
r1 = (r1 >= r2) ? 1 : 0; r1 = (r1 >= r2) ? 1 : 0;
break; break;
case IToken.tASSIGN:
throw new EvalException(IProblem.SCANNER_ASSIGNMENT_NOT_ALLOWED, null);
} }
} }
return r1; return r1;
@ -235,7 +237,8 @@ class ExpressionEvaluator {
} }
throw new EvalException(IProblem.SCANNER_MISSING_R_PAREN, null); throw new EvalException(IProblem.SCANNER_MISSING_R_PAREN, null);
case IToken.tIDENTIFIER: case IToken.tIDENTIFIER:
return 1; consume();
return 0;
default: default:
throw new EvalException(IProblem.SCANNER_EXPRESSION_SYNTAX_ERROR, null); throw new EvalException(IProblem.SCANNER_EXPRESSION_SYNTAX_ERROR, null);
} }
@ -350,14 +353,26 @@ class ExpressionEvaluator {
} }
private long getNumber(char[] tokenImage, int from, int to, int base, int problemID) throws EvalException { private long getNumber(char[] tokenImage, int from, int to, int base, int problemID) throws EvalException {
if (from == to) {
throw new EvalException(problemID, tokenImage);
}
long result= 0; long result= 0;
for (int i = from; i < to; i++) { int i= from;
for (; i < to; i++) {
int digit= getDigit(tokenImage[i]); int digit= getDigit(tokenImage[i]);
if (digit >= base) { if (digit >= base) {
throw new EvalException(problemID, tokenImage); break;
} }
result= result*base + digit; result= result*base + digit;
} }
for (; i < to; i++) {
switch(tokenImage[i]) {
case 'u' : case 'l': case 'U': case 'L':
break;
default:
throw new EvalException(problemID, tokenImage);
}
}
return result; return result;
} }
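The reworked getNumber() above stops at the first non-digit and then tolerates only the integer-suffix characters u, U, l, L, so conditions using suffixed literals such as 0x1FUL no longer trigger a scanner problem. A simplified, stand-alone sketch of that parsing scheme (illustrative only, names invented; the real method reports problems through EvalException rather than an unchecked exception):

public class SuffixTolerantNumberSketch {
	static long parse(String image, int base) {
		long result = 0;
		int i = 0;
		for (; i < image.length(); i++) {
			int digit = Character.digit(image.charAt(i), base);
			if (digit < 0) {
				break;                       // first non-digit ends the value part
			}
			result = result * base + digit;
		}
		for (; i < image.length(); i++) {    // only an integer suffix may follow
			char c = image.charAt(i);
			if (c != 'u' && c != 'U' && c != 'l' && c != 'L') {
				throw new IllegalArgumentException("invalid number: " + image);
			}
		}
		return result;
	}

	public static void main(String[] args) {
		System.out.println(parse("1FUL", 16)); // 31, i.e. the value of 0x1FUL
		System.out.println(parse("10UL", 10)); // 10
	}
}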


@ -17,6 +17,10 @@ import org.eclipse.cdt.core.parser.IProblem;
* @since 5.0 * @since 5.0
*/ */
public interface ILexerLog { public interface ILexerLog {
ILexerLog NULL = new ILexerLog() {
public void handleComment(boolean isBlockComment, int offset, int endOffset) {}
public void handleProblem(int problemID, char[] info, int offset, int endOffset) {}
};
/** /**
* A problem has been detected * A problem has been detected

View file

@ -12,9 +12,22 @@
package org.eclipse.cdt.internal.core.parser.scanner; package org.eclipse.cdt.internal.core.parser.scanner;
/** /**
* Interface for modeling contexts that can deal with offsets. These are: * Interface between location map and preprocessor for modeling contexts that can deal with offsets.
* These are:
* synthetic contexts used for pre-included files, file-contexts, macro-expansions. * synthetic contexts used for pre-included files, file-contexts, macro-expansions.
* @since 5.0 * @since 5.0
*/ */
public interface ILocationCtx { public interface ILocationCtx {
/**
* If this is a file context the filename of this context is returned,
* otherwise the filename of the first enclosing context that is a file context is returned.
*/
String getFilePath();
/**
* Returns the enclosing context or <code>null</code> if this is the translation unit context.
*/
ILocationCtx getParent();
} }


@ -65,6 +65,7 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
/** /**
* @see IASTTranslationUnit#getContainingFilename() * @see IASTTranslationUnit#getContainingFilename()
* mstodo- scanner removal should be renamed
*/ */
public String getContainingFilename(int offset); public String getContainingFilename(int offset);


@ -18,4 +18,6 @@ package org.eclipse.cdt.internal.core.parser.scanner;
*/ */
public class ImageLocationInfo { public class ImageLocationInfo {
public static final ImageLocationInfo[] NO_LOCATION_INFOS= {};
} }


@ -10,7 +10,6 @@
*******************************************************************************/ *******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner; package org.eclipse.cdt.internal.core.parser.scanner;
import org.eclipse.cdt.core.dom.ast.IASTProblem;
import org.eclipse.cdt.core.parser.IGCCToken; import org.eclipse.cdt.core.parser.IGCCToken;
import org.eclipse.cdt.core.parser.IProblem; import org.eclipse.cdt.core.parser.IProblem;
import org.eclipse.cdt.core.parser.IToken; import org.eclipse.cdt.core.parser.IToken;
@ -23,7 +22,7 @@ import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
* Returns preprocessor tokens. * Returns preprocessor tokens.
* <p> * <p>
* In addition to the preprocessor tokens the following tokens may also be returned: * In addition to the preprocessor tokens the following tokens may also be returned:
* {@link #tEND_OF_INPUT}, {@link IToken#tCOMPLETION}. * {@link #tBEFORE_INPUT}, {@link #tEND_OF_INPUT}, {@link IToken#tCOMPLETION}.
* <p> * <p>
* Number literals are split up into {@link IToken#tINTEGER} and {@link IToken#tFLOATINGPT}. * Number literals are split up into {@link IToken#tINTEGER} and {@link IToken#tFLOATINGPT}.
* No checks are done on the number literals. * No checks are done on the number literals.
@ -67,6 +66,7 @@ final public class Lexer {
// the input to the lexer // the input to the lexer
private final char[] fInput; private final char[] fInput;
private int fStart;
private int fLimit; private int fLimit;
// after phase 3 (newline, trigraph, line-splice) // after phase 3 (newline, trigraph, line-splice)
@ -75,7 +75,7 @@ final public class Lexer {
private int fCharPhase3; private int fCharPhase3;
private boolean fInsideIncludeDirective= false; private boolean fInsideIncludeDirective= false;
private Token fToken= new SimpleToken(tBEFORE_INPUT, 0, 0); private Token fToken;
// for the few cases where we have to lookahead more than one character // for the few cases where we have to lookahead more than one character
private int fMarkOffset; private int fMarkOffset;
@ -85,18 +85,16 @@ final public class Lexer {
public Lexer(char[] input, LexerOptions options, ILexerLog log) { public Lexer(char[] input, LexerOptions options, ILexerLog log) {
fInput= input; this(input, 0, input.length, options, log);
fLimit= input.length;
fOptions= options;
fLog= log;
nextCharPhase3();
} }
public Lexer(char[] input, int limit, LexerOptions options, ILexerLog log) { public Lexer(char[] input, int start, int end, LexerOptions options, ILexerLog log) {
fInput= input; fInput= input;
fLimit= limit; fStart= fOffset= fEndOffset= start;
fLimit= end;
fOptions= options; fOptions= options;
fLog= log; fLog= log;
fToken= new SimpleToken(tBEFORE_INPUT, start, start);
nextCharPhase3(); nextCharPhase3();
} }
@ -107,7 +105,7 @@ final public class Lexer {
fOptions.fSupportContentAssist= true; fOptions.fSupportContentAssist= true;
fLimit= Math.min(fLimit, fInput.length); fLimit= Math.min(fLimit, fInput.length);
// re-initialize // re-initialize
fOffset= fEndOffset= 0; fOffset= fEndOffset= fStart;
nextCharPhase3(); nextCharPhase3();
} }
@ -115,8 +113,8 @@ final public class Lexer {
* Call this before consuming the name-token in the include directive. It causes the header-file * Call this before consuming the name-token in the include directive. It causes the header-file
* tokens to be created. * tokens to be created.
*/ */
public void setInsideIncludeDirective() { public void setInsideIncludeDirective(boolean val) {
fInsideIncludeDirective= true; fInsideIncludeDirective= val;
} }
/** /**
@ -131,7 +129,8 @@ final public class Lexer {
* @throws OffsetLimitReachedException when completion is requested in a literal or a header-name. * @throws OffsetLimitReachedException when completion is requested in a literal or a header-name.
*/ */
public Token nextToken() throws OffsetLimitReachedException { public Token nextToken() throws OffsetLimitReachedException {
fFirstTokenAfterNewline= fToken.getType() == tNEWLINE; final int t= fToken.getType();
fFirstTokenAfterNewline= t == tNEWLINE || t == tBEFORE_INPUT;
return fToken= fetchToken(); return fToken= fetchToken();
} }
@ -169,6 +168,33 @@ final public class Lexer {
} }
} }
/**
* Advances to the next newline.
* @return the list of tokens found on this line.
* @param origin parameter for the {@link OffsetLimitReachedException} when it has to be thrown.
*/
public final void getTokensOfLine(int origin, TokenList result) throws OffsetLimitReachedException {
Token t= fToken;
while(true) {
switch(t.getType()) {
case IToken.tCOMPLETION:
fToken= t;
throw new OffsetLimitReachedException(origin, t);
case Lexer.tEND_OF_INPUT:
fToken= t;
if (fOptions.fSupportContentAssist) {
throw new OffsetLimitReachedException(origin, null);
}
return;
case Lexer.tNEWLINE:
fToken= t;
return;
}
result.append(t);
t= fetchToken();
}
}
/** /**
* Advances to the next pound token that starts a preprocessor directive. * Advances to the next pound token that starts a preprocessor directive.
* @return pound token of the directive or end-of-input. * @return pound token of the directive or end-of-input.
@ -196,7 +222,6 @@ final public class Lexer {
break; break;
} }
} }
t= fetchToken();
} }
fToken= t; fToken= t;
return t; return t;
@ -264,7 +289,7 @@ final public class Lexer {
nextCharPhase3(); nextCharPhase3();
return identifier(start, 2); return identifier(start, 2);
} }
return newToken(IToken.tBACKSLASH, start); return newToken(IToken.tOTHER_CHARACTER, start, 1);
case '0': case '1': case '2': case '3': case '4': case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9': case '5': case '6': case '7': case '8': case '9':
@ -511,9 +536,8 @@ final public class Lexer {
} }
break; break;
} }
// handles for instance @
handleProblem(IASTProblem.SCANNER_BAD_CHARACTER, new char[] {(char) c}, start); return newToken(IToken.tOTHER_CHARACTER, start, 1);
// loop is continued, character is treated as white-space.
} }
} }
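Further down, MacroExpander.tokenpaste() drives this Lexer directly (new Lexer(image, fLexOptions, ILexerLog.NULL) followed by nextToken() calls); a hypothetical stand-alone driver along the same lines is sketched here. It assumes it lives in the scanner package and that LexerOptions can be default-constructed, both of which go beyond what this diff shows.

package org.eclipse.cdt.internal.core.parser.scanner;

import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
import org.eclipse.cdt.internal.core.parser.scanner.Lexer.LexerOptions;

public class LexerDriverSketch {
	public static void main(String[] args) throws OffsetLimitReachedException {
		char[] input = "foo ## 0x1FUL".toCharArray();
		Lexer lexer = new Lexer(input, new LexerOptions(), ILexerLog.NULL);   // default options assumed
		for (Token t = lexer.nextToken(); t.getType() != Lexer.tEND_OF_INPUT; t = lexer.nextToken()) {
			System.out.println(t.getType() + " \"" + new String(t.getCharImage()) + '"');
		}
	}
}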


@ -37,11 +37,11 @@ abstract class LocationCtx implements ILocationCtx {
} }
} }
public String getFilename() { public String getFilePath() {
return fParent.getFilename(); return fParent.getFilePath();
} }
final public LocationCtx getParent() { final public ILocationCtx getParent() {
return fParent; return fParent;
} }
/** /**
@ -262,7 +262,7 @@ class FileLocationCtx extends ContainerLocationCtx {
} }
} }
public final String getFilename() { public final String getFilePath() {
return fFilename; return fFilename;
} }
@ -331,5 +331,5 @@ class MacroExpansionCtx extends LocationCtx {
return fLength; return fLength;
} }
// mstodo once image locations are supported we need to handle those in here // mstodo- image locations
} }


@ -183,12 +183,11 @@ public class LocationMap implements ILocationResolver {
public void popContext(ILocationCtx locationCtx) { public void popContext(ILocationCtx locationCtx) {
assert fCurrentContext == locationCtx; assert fCurrentContext == locationCtx;
final LocationCtx child= fCurrentContext; final LocationCtx child= fCurrentContext;
final LocationCtx parent= fCurrentContext.getParent(); final LocationCtx parent= (LocationCtx) fCurrentContext.getParent();
if (parent != null) { if (parent != null) {
fCurrentContext= child.getParent(); fCurrentContext= parent;
fLastChildInsertionOffset= child.fParentEndOffset; fLastChildInsertionOffset= child.fParentEndOffset;
parent.addChildSequenceLength(child.getSequenceLength()); parent.addChildSequenceLength(child.getSequenceLength());
fCurrentContext= parent;
} }
} }
@ -328,8 +327,8 @@ public class LocationMap implements ILocationResolver {
* Returns the filename of the current context. If the context is a macro-expansion the filename of * Returns the filename of the current context. If the context is a macro-expansion the filename of
* the enclosing file is returned. * the enclosing file is returned.
*/ */
public String getCurrentFilename() { public String getCurrentFilePath() {
return fCurrentContext.getFilename(); return fCurrentContext.getFilePath();
} }
/** /**
@ -337,13 +336,13 @@ public class LocationMap implements ILocationResolver {
* <p> * <p>
* You must insert all child contexts before the given offset before conversion. * You must insert all child contexts before the given offset before conversion.
*/ */
private int getSequenceNumberForOffset(int offset) { int getSequenceNumberForOffset(int offset) {
return fCurrentContext.getSequenceNumberForOffset(offset, offset < fLastChildInsertionOffset); return fCurrentContext.getSequenceNumberForOffset(offset, offset < fLastChildInsertionOffset);
} }
public String getContainingFilename(int sequenceNumber) { public String getContainingFilename(int sequenceNumber) {
LocationCtx ctx= fRootContext.ctxForNumberRange(sequenceNumber, 1); LocationCtx ctx= fRootContext.ctxForNumberRange(sequenceNumber, 1);
return new String(ctx.getFilename()); return new String(ctx.getFilePath());
} }
public IASTFileLocation getMappedFileLocation(int sequenceNumber, int length) { public IASTFileLocation getMappedFileLocation(int sequenceNumber, int length) {
@ -423,21 +422,21 @@ public class LocationMap implements ILocationResolver {
public void cleanup() { public void cleanup() {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
// mstodo get rid of IASTNodeLocation // mstodo- locations
public IASTFileLocation flattenLocations(IASTNodeLocation[] locations) { public IASTFileLocation flattenLocations(IASTNodeLocation[] locations) {
if (locations.length != 1 || !(locations[0] instanceof IASTFileLocation)) { if (locations.length != 1 || !(locations[0] instanceof IASTFileLocation)) {
throw new IllegalArgumentException(); throw new IllegalArgumentException();
} }
return (IASTFileLocation) locations[0]; return (IASTFileLocation) locations[0];
} }
// mstodo get rid of IASTNodeLocation // mstodo- locations
public IASTNodeLocation[] getLocations(int offset, int length) { public IASTNodeLocation[] getLocations(int offset, int length) {
return new IASTNodeLocation[] {getMappedFileLocation(offset, length)}; return new IASTNodeLocation[] {getMappedFileLocation(offset, length)};
} }
public ASTPreprocessorSelectionResult getPreprocessorNode(String path, int offset, int length) { public ASTPreprocessorSelectionResult getPreprocessorNode(String path, int offset, int length) {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
// mstodo get rid of IASTNodeLocation // mstodo- locations
public char[] getUnpreprocessedSignature(IASTNodeLocation[] locations) { public char[] getUnpreprocessedSignature(IASTNodeLocation[] locations) {
switch(locations.length) { switch(locations.length) {
case 0: return CharArrayUtils.EMPTY; case 0: return CharArrayUtils.EMPTY;


@ -53,14 +53,6 @@ class MacroDefinitionParser {
return fExpansionOffset; return fExpansionOffset;
} }
/**
* In case the expansion was successfully parsed, the end offset is returned.
* Otherwise the return value is undefined.
*/
public int getExpansionEndOffset() {
return fExpansionEndOffset;
}
/** /**
* Parses an entire macro definition. Name must be the next token of the lexer. * Parses an entire macro definition. Name must be the next token of the lexer.
*/ */
@ -70,7 +62,8 @@ class MacroDefinitionParser {
final char[] source= lexer.getInput(); final char[] source= lexer.getInput();
final char[] nameChars= name.getCharImage(); final char[] nameChars= name.getCharImage();
final char[][] paramList= parseParamList(lexer, name); final char[][] paramList= parseParamList(lexer, name);
final Token replacement= parseExpansion(lexer, log, nameChars, paramList, fHasVarArgs); final TokenList replacement= new TokenList();
parseExpansion(lexer, log, nameChars, paramList, replacement);
if (paramList == null) { if (paramList == null) {
return new ObjectStyleMacro(nameChars, fExpansionOffset, fExpansionEndOffset, replacement, source); return new ObjectStyleMacro(nameChars, fExpansionOffset, fExpansionEndOffset, replacement, source);
} }
@ -177,7 +170,13 @@ class MacroDefinitionParser {
paramList.add(Keywords.cVA_ARGS); paramList.add(Keywords.cVA_ARGS);
next= lex.nextToken(); next= lex.nextToken();
break; break;
case IToken.tRPAREN:
if (next == null) {
next= param;
break;
}
// no break;
default: default:
throw new InvalidMacroDefinitionException(name.getCharImage()); throw new InvalidMacroDefinitionException(name.getCharImage());
} }
@ -191,16 +190,14 @@ class MacroDefinitionParser {
return (char[][]) paramList.toArray(new char[paramList.size()][]); return (char[][]) paramList.toArray(new char[paramList.size()][]);
} }
private Token parseExpansion(final Lexer lexer, final ILexerLog log, final char[] name, final char[][] paramList, final int hasVarArgs) public void parseExpansion(final Lexer lexer, final ILexerLog log, final char[] name, final char[][] paramList,
throws OffsetLimitReachedException { TokenList result) throws OffsetLimitReachedException {
final boolean allowVarArgsArray= hasVarArgs==FunctionStyleMacro.VAARGS;
boolean needParam= false; boolean needParam= false;
boolean isFirst= true;
Token needAnotherToken= null; Token needAnotherToken= null;
Token candidate= lexer.currentToken(); Token candidate= lexer.currentToken();
fExpansionOffset= candidate.getOffset(); fExpansionOffset= fExpansionEndOffset= candidate.getOffset();
Token last= new SimpleToken(Lexer.tNEWLINE, fExpansionOffset, fExpansionOffset);
final Token resultHolder= last;
loop: while(true) { loop: while(true) {
switch(candidate.getType()) { switch(candidate.getType()) {
@ -210,20 +207,31 @@ class MacroDefinitionParser {
case Lexer.tNEWLINE: case Lexer.tNEWLINE:
break loop; break loop;
case IToken.tIDENTIFIER: case IToken.tIDENTIFIER:
if (!allowVarArgsArray && CharArrayUtils.equals(Keywords.cVA_ARGS, candidate.getCharImage())) { if (paramList != null) {
log.handleProblem(IProblem.PREPROCESSOR_INVALID_VA_ARGS, null, fExpansionOffset, candidate.getEndOffset()); // convert the parameters to special tokens
final char[] image = candidate.getCharImage();
int idx= CharArrayUtils.indexOf(image, paramList);
if (idx >= 0) {
candidate= new PlaceHolderToken(CPreprocessor.tMACRO_PARAMETER, idx, candidate.getOffset(), candidate.getEndOffset(), paramList[idx]);
needParam= false;
}
else {
if (needParam) {
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, fExpansionOffset, candidate.getEndOffset());
}
else if (CharArrayUtils.equals(Keywords.cVA_ARGS, image)) {
log.handleProblem(IProblem.PREPROCESSOR_INVALID_VA_ARGS, null, fExpansionOffset, candidate.getEndOffset());
}
needParam= false;
}
} }
if (needParam && CharArrayUtils.indexOf(candidate.getCharImage(), paramList) == -1) {
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, fExpansionOffset, candidate.getEndOffset());
}
needParam= false;
needAnotherToken= null; needAnotherToken= null;
break; break;
case IToken.tPOUND: case IToken.tPOUND:
needParam= paramList != null; needParam= paramList != null;
break; break;
case IToken.tPOUNDPOUND: case IToken.tPOUNDPOUND:
if (needParam || resultHolder == last) { if (needParam || isFirst) {
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, fExpansionOffset, candidate.getEndOffset()); log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, fExpansionOffset, candidate.getEndOffset());
} }
needAnotherToken= candidate; needAnotherToken= candidate;
@ -237,13 +245,13 @@ class MacroDefinitionParser {
needAnotherToken= null; needAnotherToken= null;
break; break;
} }
last.setNext(candidate); last=candidate; isFirst= false;
fExpansionEndOffset= candidate.getEndOffset();
result.append(candidate);
candidate= lexer.nextToken(); candidate= lexer.nextToken();
} }
if (needAnotherToken != null) { if (needAnotherToken != null) {
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, needAnotherToken.getOffset(), needAnotherToken.getEndOffset()); log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, needAnotherToken.getOffset(), needAnotherToken.getEndOffset());
} }
fExpansionEndOffset= last.getEndOffset();
return (Token) resultHolder.getNext();
} }
} }
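The rewritten parseExpansion() above replaces each identifier that matches an entry of the parameter list with an indexed placeholder token (CPreprocessor.tMACRO_PARAMETER), which is what later lets MacroExpander.replaceArgs() substitute arguments by position. A drastically simplified, string-based sketch of that idea (illustrative only, not CDT code):

import java.util.Arrays;
import java.util.List;

public class PlaceholderSubstitutionSketch {
	static String[] markParameters(String[] expansionTokens, List<String> paramList) {
		String[] result = new String[expansionTokens.length];
		for (int i = 0; i < expansionTokens.length; i++) {
			int idx = paramList.indexOf(expansionTokens[i]);
			// parameter references are replaced by their index, everything else is kept verbatim
			result[i] = idx >= 0 ? "<param#" + idx + ">" : expansionTokens[i];
		}
		return result;
	}

	public static void main(String[] args) {
		// #define ADD(a, b) ((a) + (b))
		String[] expansion = { "(", "(", "a", ")", "+", "(", "b", ")", ")" };
		System.out.println(Arrays.toString(markParameters(expansion, Arrays.asList("a", "b"))));
		// prints [(, (, <param#0>, ), +, (, <param#1>, ), )]
	}
}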


@ -0,0 +1,539 @@
/*******************************************************************************
* Copyright (c) 2007 Wind River Systems, Inc. and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Markus Schorn - initial API and implementation
*******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import org.eclipse.cdt.core.dom.ast.IASTName;
import org.eclipse.cdt.core.parser.IProblem;
import org.eclipse.cdt.core.parser.IToken;
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
import org.eclipse.cdt.core.parser.util.CharArrayObjectMap;
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
import org.eclipse.cdt.internal.core.parser.scanner.Lexer.LexerOptions;
/**
* Utility class to perform macro expansion.
* @since 5.0
*/
public class MacroExpander {
private static final int ORIGIN = OffsetLimitReachedException.ORIGIN_MACRO_EXPANSION;
/**
* Marks the beginning and the end of the scope of a macro expansion. Necessary to properly
* handle recursive expansions and to figure out whether spaces are required during a stringify
* operation across such boundaries.
*/
public static final class ExpansionBoundary extends Token {
private PreprocessorMacro fScope;
private boolean fIsStart;
ExpansionBoundary(PreprocessorMacro scope, int offset, boolean isStart) {
super(CPreprocessor.tSCOPE_MARKER, offset, offset);
fScope= scope;
fIsStart= isStart;
}
public char[] getCharImage() {
return CharArrayUtils.EMPTY;
}
public String toString() {
return "{" + (fIsStart ? '+' : '-') + //$NON-NLS-1$
(fScope == null ? String.valueOf(getOffset()) : fScope.getName()) + '}';
}
public void execute(IdentityHashMap forbidden) {
if (fIsStart) {
forbidden.put(fScope, fScope);
}
else {
forbidden.remove(fScope);
}
fScope= null;
}
public Object clone() {
// when cloned for the purpose of argument substitution, the boundaries no longer prevent a
// recursive macro expansion.
ExpansionBoundary t= (ExpansionBoundary) super.clone();
t.fScope= null;
return t;
}
}
/**
* Combines a list of tokens with the preprocessor to form the input for macro expansion.
*/
private class TokenSource extends TokenList {
private boolean fUseCpp;
public TokenSource(boolean useCpp) {
fUseCpp= true;
}
public Token fetchFirst() throws OffsetLimitReachedException {
Token t= removeFirst();
if (t == null && fUseCpp) {
t= fCpp.fetchTokenFromPreprocessor();
fEndOffset= t.getEndOffset();
}
return t;
}
public boolean findLParenthesis(IdentityHashMap forbidden) throws OffsetLimitReachedException {
Token t= first();
while (t != null) {
switch (t.getType()) {
case Lexer.tNEWLINE:
break;
case CPreprocessor.tSCOPE_MARKER:
((ExpansionBoundary) t).execute(forbidden);
break;
case IToken.tLPAREN:
return true;
default:
return false;
}
removeFirst();
t= first();
}
if (fUseCpp) {
return fCpp.findLParenthesisInContext();
}
return false;
}
}
private final MacroDefinitionParser fDefinitionParser;
private final CharArrayObjectMap fDictionary;
private final LocationMap fLocationMap;
private final CPreprocessor fCpp;
private final LexerOptions fLexOptions;
private int fEndOffset;
private ArrayList fImplicitMacroExpansions= new ArrayList();
private boolean fCompletionMode;
private int fStartOffset;
public MacroExpander(CPreprocessor cpp, CharArrayObjectMap dict, LocationMap locationMap, MacroDefinitionParser mdp, LexerOptions lexOptions) {
fCpp= cpp;
fDictionary= dict;
fLocationMap= locationMap;
fDefinitionParser= mdp;
fLexOptions= lexOptions;
}
/**
* Expects that the identifier has been consumed, stores the result in the list provided and returns the
* end offset of the last token read from the preprocessor input.
*/
public int expand(PreprocessorMacro macro, Token identifier, boolean completionMode, TokenList expansion) throws OffsetLimitReachedException {
fStartOffset= identifier.getOffset();
fEndOffset= identifier.getEndOffset();
fCompletionMode= completionMode;
IdentityHashMap forbidden= new IdentityHashMap();
// setup input sequence
TokenSource input= new TokenSource(true);
TokenList firstExpansion= expandOne(macro, forbidden, input, fStartOffset, fEndOffset);
input.prepend(firstExpansion);
expandAll(input, forbidden, expansion);
return fEndOffset;
}
/**
* Expects that the identifier of the macro expansion has been consumed.
*/
private TokenList expandOne(PreprocessorMacro macro, IdentityHashMap forbidden, TokenSource input, int offset, int endOffset)
throws OffsetLimitReachedException {
TokenList result= new TokenList();
result.append(new ExpansionBoundary(macro, offset, true));
if (macro.isFunctionStyle()) {
final TokenSource[] argInputs= new TokenSource[macro.getParameterPlaceholderList().length];
endOffset= parseArguments(input, (FunctionStyleMacro) macro, argInputs);
TokenList[] clonedArgs= new TokenList[argInputs.length];
TokenList[] expandedArgs= new TokenList[argInputs.length];
for (int i = 0; i < argInputs.length; i++) {
final TokenSource argInput = argInputs[i];
clonedArgs[i]= argInput.cloneTokens();
final TokenList expandedArg= new TokenList();
expandAll(argInput, forbidden, expandedArg);
expandedArgs[i]= expandedArg;
}
replaceArgs(macro, clonedArgs, expandedArgs, result);
}
else {
objStyleTokenPaste(macro, macro.getTokens(fDefinitionParser, fLexOptions), result);
}
result.append(new ExpansionBoundary(macro, endOffset, false));
return result;
}
private void expandAll(TokenSource input, IdentityHashMap forbidden, TokenList result) throws OffsetLimitReachedException {
Token t= input.removeFirst();
while(t != null) {
switch(t.getType()) {
case CPreprocessor.tSCOPE_MARKER:
((ExpansionBoundary) t).execute(forbidden);
result.append(t);
break;
case IToken.tIDENTIFIER:
PreprocessorMacro macro= (PreprocessorMacro) fDictionary.get(t.getCharImage());
if (macro != null && !forbidden.containsKey(macro)) {
final boolean isFunctionStyle= macro.isFunctionStyle();
if (!isFunctionStyle || input.findLParenthesis(forbidden)) {
// mstodo- image location
fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, null));
TokenList replacement= expandOne(macro, forbidden, input, t.getOffset(), t.getEndOffset());
input.prepend(replacement);
t= null;
}
}
if (t != null) {
t.setType(CPreprocessor.tEXPANDED_IDENTIFIER); // prevent any further expansion
result.append(t);
}
break;
default:
result.append(t);
break;
}
t= input.removeFirst();
}
}
/**
* Expects that the identifier has been consumed.
* @throws OffsetLimitReachedException
*/
private int parseArguments(TokenSource input, FunctionStyleMacro macro, TokenSource[] result) throws OffsetLimitReachedException {
final int argCount= macro.getParameterPlaceholderList().length;
final boolean hasVarargs= macro.hasVarArgs() != FunctionStyleMacro.NO_VAARGS;
final int requiredArgs= hasVarargs ? argCount-1 : argCount;
int endOffset= 0;
int idx= 0;
int nesting = -1;
for (int i = 0; i < result.length; i++) {
result[i]= new TokenSource(false);
}
loop: while (true) {
Token t= input.fetchFirst();
if (t == null) {
break loop;
}
endOffset= t.getEndOffset();
switch(t.getType()) {
case Lexer.tEND_OF_INPUT:
if (fCompletionMode) {
throw new OffsetLimitReachedException(ORIGIN, null);
}
break loop;
case IToken.tCOMPLETION:
throw new OffsetLimitReachedException(ORIGIN, t);
case Lexer.tNEWLINE:
assert false; // we should not get any newlines from macros or the preprocessor.
break;
case IToken.tLPAREN:
if (++nesting > 0) {
result[idx].append(t);
}
break;
case IToken.tRPAREN:
if (--nesting < 0) {
idx++;
break loop;
}
result[idx].append(t);
break;
case IToken.tCOMMA:
if (nesting == 0) {
if (idx < argCount-1) { // next argument
idx++;
break;
}
else if (!hasVarargs) {
// too many arguments
handleProblem(IProblem.PREPROCESSOR_MACRO_USAGE_ERROR, macro.getNameCharArray());
break loop;
}
}
// part of argument
result[idx].append(t);
break;
default:
if (nesting < 0) {
assert false; // no leading parenthesis, which is checked before the method is called.
break loop;
}
result[idx].append(t);
break;
}
}
if (idx < requiredArgs) {
handleProblem(IProblem.PREPROCESSOR_MACRO_USAGE_ERROR, macro.getNameCharArray());
}
return endOffset;
}
private void handleProblem(int problemID, char[] arg) {
fCpp.handleProblem(problemID, arg, fStartOffset, fEndOffset);
}
private void replaceArgs(PreprocessorMacro macro, TokenList[] args, TokenList[] expandedArgs, TokenList result) {
TokenList input= macro.getTokens(fDefinitionParser, fLexOptions);
Token n;
Token pasteArg1= null;
for (Token t= input.first(); t != null; t=n) {
n= (Token) t.getNext();
boolean pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
switch(t.getType()) {
case CPreprocessor.tMACRO_PARAMETER:
int idx= ((PlaceHolderToken) t).getIndex();
if (idx < args.length) { // be defensive
TokenList arg= pasteNext ? args[idx] : expandedArgs[idx];
pasteArg1= cloneAndAppend(arg.first(), result, pasteNext);
}
break;
case IToken.tPOUND:
StringBuffer buf= new StringBuffer();
buf.append('"');
if (n != null && n.getType() == CPreprocessor.tMACRO_PARAMETER) {
idx= ((PlaceHolderToken) n).getIndex();
if (idx < args.length) { // be defensive
stringify(args[idx], buf);
}
n= (Token) n.getNext();
pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
}
buf.append('"');
final int length= buf.length();
final char[] image= new char[length];
buf.getChars(0, length, image, 0);
pasteArg1= appendToResult(new ImageToken(IToken.tSTRING, 0, 0, image), result, pasteNext);
break;
case IToken.tPOUNDPOUND:
if (pasteArg1 != null) {
Token pasteArg2= null;
Token rest= null;
if (n != null) {
if (n.getType() == CPreprocessor.tMACRO_PARAMETER) {
idx= ((PlaceHolderToken) n).getIndex();
if (idx < args.length) { // be defensive
TokenList arg= args[idx];
pasteArg2= arg.first();
if (pasteArg2 != null) {
rest= (Token) pasteArg2.getNext();
}
}
}
else {
pasteArg2= n;
}
n= (Token) n.getNext();
pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
}
t= tokenpaste(pasteArg1, pasteArg2, macro);
if (t != null) {
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext && rest == null);
}
if (rest != null) {
pasteArg1= cloneAndAppend(rest, result, pasteNext);
}
}
break;
default:
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext);
break;
}
}
}
private void objStyleTokenPaste(PreprocessorMacro macro, TokenList input, TokenList result) {
Token n;
Token pasteArg1= null;
for (Token t= input.first(); t != null; t=n) {
n= (Token) t.getNext();
boolean pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
switch(t.getType()) {
case IToken.tPOUNDPOUND:
if (pasteArg1 != null) {
Token pasteArg2= null;
if (n != null) {
pasteArg2= n;
n= (Token) n.getNext();
pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
}
t= tokenpaste(pasteArg1, pasteArg2, macro);
if (t != null) {
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext);
}
}
break;
default:
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext);
break;
}
}
}
private Token appendToResult(Token t, TokenList result, boolean pasteNext) {
if (pasteNext) {
return t;
}
result.append(t);
return null;
}
private Token cloneAndAppend(Token tokens, TokenList result, boolean pasteNext) {
Token t= tokens;
Token r= t == null ? null : (Token) t.getNext();
while (r != null) {
result.append((Token) t.clone());
t= r;
r= (Token) r.getNext();
}
if (t != null && !pasteNext) {
result.append((Token) t.clone());
return null;
}
return t;
}
private Token tokenpaste(Token arg1, Token arg2, PreprocessorMacro macro) {
if (arg2 == null) {
if (arg1.getType() == IToken.tCOMMA) { // gcc-extension for variadic macros
return null;
}
return arg1;
}
final char[] image1= arg1.getCharImage();
final char[] image2= arg2.getCharImage();
final int l1 = image1.length;
final int l2 = image2.length;
final char[] image= new char[l1+l2];
System.arraycopy(image1, 0, image, 0, l1);
System.arraycopy(image2, 0, image, l1, l2);
Lexer lex= new Lexer(image, fLexOptions, ILexerLog.NULL);
try {
Token t1= lex.nextToken();
Token t2= lex.nextToken();
if (t1.getType() != Lexer.tEND_OF_INPUT && t2.getType() == Lexer.tEND_OF_INPUT) {
t1.setOffset(arg1.getOffset(), arg2.getEndOffset());
return t1;
}
} catch (OffsetLimitReachedException e) {
}
handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, macro.getNameCharArray());
return null;
}
private void stringify(TokenList tokenList, StringBuffer buf) {
Token t= tokenList.first();
if (t == null) {
return;
}
int endOffset= t.getOffset();
for (; t != null; t= (Token) t.getNext()) {
switch(t.getType()) {
case IToken.tSTRING:
case IToken.tLSTRING:
case IToken.tCHAR:
case IToken.tLCHAR:
if (endOffset < t.getOffset()) {
buf.append(' ');
}
endOffset= t.getEndOffset();
final char[] image= t.getCharImage();
for (int i = 0; i < image.length; i++) {
final char c = image[i];
if (c == '"' || c == '\\') {
buf.append('\\');
}
buf.append(c);
}
break;
case CPreprocessor.tSCOPE_MARKER:
ExpansionBoundary sm= (ExpansionBoundary) t;
if (sm.fIsStart) {
if (endOffset < t.getOffset()) {
buf.append(' ');
}
endOffset= Integer.MAX_VALUE;
}
else {
endOffset= t.getEndOffset();
}
break;
default:
if (endOffset < t.getOffset()) {
buf.append(' ');
}
endOffset= t.getEndOffset();
buf.append(t.getCharImage());
break;
}
}
}
public IASTName[] createImplicitExpansions() {
IASTName[] result= (IASTName[]) fImplicitMacroExpansions.toArray(new IASTName[fImplicitMacroExpansions.size()]);
fImplicitMacroExpansions.clear();
return result;
}
public ImageLocationInfo[] createImageLocations(TokenList replacement) {
// mstodo- image locations
return ImageLocationInfo.NO_LOCATION_INFOS;
}
// Gives the tokens of a completed expansion consecutive artificial offsets, turns expanded identifiers back into plain identifiers
// and removes the scope markers.
public int adjustOffsets(TokenList replacement) {
int offset= 0;
Token l= null;
for (Token t= replacement.first(); t!=null; t= (Token) t.getNext()) {
switch(t.getType()) {
case CPreprocessor.tEXPANDED_IDENTIFIER:
t.setType(IToken.tIDENTIFIER);
break;
case CPreprocessor.tSCOPE_MARKER:
replacement.removeBehind(l);
continue;
}
t.setOffset(offset, ++offset);
l= t;
}
return offset;
}
}

View file

@ -14,8 +14,10 @@ import org.eclipse.cdt.core.dom.ILinkage;
import org.eclipse.cdt.core.dom.ast.IMacroBinding; import org.eclipse.cdt.core.dom.ast.IMacroBinding;
import org.eclipse.cdt.core.dom.ast.IScope; import org.eclipse.cdt.core.dom.ast.IScope;
import org.eclipse.cdt.core.parser.Keywords; import org.eclipse.cdt.core.parser.Keywords;
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
import org.eclipse.cdt.core.parser.util.CharArrayUtils; import org.eclipse.cdt.core.parser.util.CharArrayUtils;
import org.eclipse.cdt.internal.core.dom.Linkage; import org.eclipse.cdt.internal.core.dom.Linkage;
import org.eclipse.cdt.internal.core.parser.scanner.Lexer.LexerOptions;
/** /**
* Models macros used by the preprocessor * Models macros used by the preprocessor
@ -51,10 +53,33 @@ abstract class PreprocessorMacro implements IMacroBinding {
public char[][] getParameterList() { public char[][] getParameterList() {
return null; return null;
} }
public char[][] getParameterPlaceholderList() {
return null;
}
public Object getAdapter(Class clazz) { public Object getAdapter(Class clazz) {
return null; return null;
} }
public String toString() {
char[][] p= getParameterList();
if (p == null) {
return getName();
}
StringBuffer buf= new StringBuffer();
buf.append(getNameCharArray());
buf.append('(');
for (int i = 0; i < p.length; i++) {
if (i>0) {
buf.append(',');
}
buf.append(p[i]);
}
buf.append(')');
return buf.toString();
}
public abstract TokenList getTokens(MacroDefinitionParser parser, LexerOptions lexOptions);
} }
abstract class DynamicStyleMacro extends PreprocessorMacro { abstract class DynamicStyleMacro extends PreprocessorMacro {
@ -63,29 +88,36 @@ abstract class DynamicStyleMacro extends PreprocessorMacro {
super(name); super(name);
} }
public char[] getExpansion() { public char[] getExpansion() {
return getExpansionImage();
}
public char[] getExpansionImage() {
return execute().getCharImage(); return execute().getCharImage();
} }
public abstract Token execute(); public abstract Token execute();
public TokenList getTokens(MacroDefinitionParser mdp, LexerOptions lexOptions) {
TokenList result= new TokenList();
result.append(execute());
return result;
}
} }
class ObjectStyleMacro extends PreprocessorMacro { class ObjectStyleMacro extends PreprocessorMacro {
private static final Token NOT_INITIALIZED = new SimpleToken(0,0,0);
private final char[] fExpansion; private final char[] fExpansion;
final int fExpansionOffset; final int fExpansionOffset;
final int fEndOffset; final int fEndOffset;
// private Token fExpansionTokens; private TokenList fExpansionTokens;
public ObjectStyleMacro(char[] name, char[] expansion) { public ObjectStyleMacro(char[] name, char[] expansion) {
this(name, 0, expansion.length, NOT_INITIALIZED, expansion); this(name, 0, expansion.length, null, expansion);
} }
public ObjectStyleMacro(char[] name, int expansionOffset, int endOffset, Token expansion, char[] source) { public ObjectStyleMacro(char[] name, int expansionOffset, int endOffset, TokenList expansion, char[] source) {
super(name); super(name);
fExpansionOffset= expansionOffset; fExpansionOffset= expansionOffset;
fEndOffset= endOffset; fEndOffset= endOffset;
fExpansion= source; fExpansion= source;
// fExpansionTokens= expansion; fExpansionTokens= expansion;
} }
public int findParameter(char[] tokenImage) { public int findParameter(char[] tokenImage) {
@ -93,14 +125,47 @@ class ObjectStyleMacro extends PreprocessorMacro {
} }
public char[] getExpansion() { public char[] getExpansion() {
TokenList tl= getTokens(new MacroDefinitionParser(), new LexerOptions());
StringBuffer buf= new StringBuffer();
Token t= tl.first();
if (t == null) {
return CharArrayUtils.EMPTY;
}
int endOffset= t.getOffset();
for (; t != null; t= (Token) t.getNext()) {
if (endOffset < t.getOffset()) {
buf.append(' ');
}
buf.append(t.getCharImage());
endOffset= t.getEndOffset();
}
final int length= buf.length();
final char[] expansion= new char[length];
buf.getChars(0, length, expansion, 0);
return expansion;
}
public char[] getExpansionImage() {
final int length = fEndOffset - fExpansionOffset; final int length = fEndOffset - fExpansionOffset;
if (length == fExpansion.length) { if (length == fExpansion.length) {
return fExpansion; return fExpansion;
} }
char[] result= new char[length]; char[] result= new char[length];
System.arraycopy(fExpansion, fEndOffset, result, 0, length); System.arraycopy(fExpansion, fExpansionOffset, result, 0, length);
return result; return result;
} }
public TokenList getTokens(MacroDefinitionParser mdp, LexerOptions lexOptions) {
if (fExpansionTokens == null) {
fExpansionTokens= new TokenList();
Lexer lex= new Lexer(fExpansion, fExpansionOffset, fEndOffset, lexOptions, ILexerLog.NULL);
try {
mdp.parseExpansion(lex, ILexerLog.NULL, getNameCharArray(), getParameterPlaceholderList(), fExpansionTokens);
} catch (OffsetLimitReachedException e) {
// not expected here, no offset limit is set for this lexer
}
}
return fExpansionTokens;
}
} }
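
The two expansion accessors differ only in normalization: getExpansionImage() returns the raw source slice, while getExpansion() re-lexes it and keeps at most one blank where the source had whitespace. A small sketch of the joining step (illustrative names, plain strings instead of tokens):

class NormalizeSketch {
    // Joins token images with single blanks, the way getExpansion() turns "x   +   y" into "x + y".
    static String normalize(String[] images) {
        StringBuffer buf = new StringBuffer();
        for (int i = 0; i < images.length; i++) {
            if (i > 0) {
                buf.append(' ');
            }
            buf.append(images[i]);
        }
        return buf.toString();
    }

    public static void main(String[] args) {
        System.out.println(normalize(new String[] { "x", "+", "y" })); // x + y
    }
}
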
@ -120,13 +185,35 @@ class FunctionStyleMacro extends ObjectStyleMacro {
} }
public FunctionStyleMacro(char[] name, char[][] paramList, int hasVarArgs, int expansionFileOffset, int endFileOffset, public FunctionStyleMacro(char[] name, char[][] paramList, int hasVarArgs, int expansionFileOffset, int endFileOffset,
Token expansion, char[] source) { TokenList expansion, char[] source) {
super(name, expansionFileOffset, endFileOffset, expansion, source); super(name, expansionFileOffset, endFileOffset, expansion, source);
fParamList = paramList; fParamList = paramList;
fHasVarArgs= hasVarArgs; fHasVarArgs= hasVarArgs;
} }
public char[][] getParameterList() { public char[][] getParameterList() {
final int length = fParamList.length;
if (fHasVarArgs == NO_VAARGS || length==0) {
return fParamList;
}
char[][] result= new char[length][];
System.arraycopy(fParamList, 0, result, 0, length-1);
if (fHasVarArgs == VAARGS) {
result[length-1] = Keywords.cVA_ARGS;
}
else {
final char[] param= fParamList[length-1];
final int plen= param.length;
final int elen = Keywords.cpELLIPSIS.length;
final char[] rp= new char[plen+elen];
System.arraycopy(param, 0, rp, 0, plen);
System.arraycopy(Keywords.cpELLIPSIS, 0, rp, plen, elen);
result[length-1]= rp;
}
return result;
}
public char[][] getParameterPlaceholderList() {
return fParamList; return fParamList;
} }

View file

@ -74,6 +74,15 @@ abstract class ScannerContext {
public abstract boolean changeBranch(Integer state); public abstract boolean changeBranch(Integer state);
public abstract Token currentPPToken(); /**
* Returns the current token from this context. When called before calling {@link #nextPPToken()}
* a token of type {@link Lexer#tBEFORE_INPUT} will be returned.
* @since 5.0
*/
public abstract Token currentLexerToken();
/**
* Returns the next token from this context.
*/
public abstract Token nextPPToken() throws OffsetLimitReachedException; public abstract Token nextPPToken() throws OffsetLimitReachedException;
} }

View file

@ -33,7 +33,7 @@ public class ScannerContextFile extends ScannerContext {
fLexer= lexer; fLexer= lexer;
} }
public Token currentPPToken() { public Token currentLexerToken() {
return fLexer.currentToken(); return fLexer.currentToken();
} }

View file

@ -0,0 +1,49 @@
/*******************************************************************************
* Copyright (c) 2007 Wind River Systems, Inc. and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Markus Schorn - initial API and implementation
*******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;
public class ScannerContextMacroExpansion extends ScannerContext {
private static final Token END_TOKEN = new SimpleToken(Lexer.tEND_OF_INPUT, 0, 0);
private Token fTokens;
public ScannerContextMacroExpansion(ILocationCtx ctx, ScannerContext parent, TokenList tokens) {
super(ctx, parent);
fTokens= tokens.first();
}
public boolean changeBranch(Integer state) {
return false;
}
public Token currentLexerToken() {
Token t= fTokens;
if (t == null) {
return END_TOKEN;
}
return t;
}
public Lexer getLexerForPPDirective() {
return null;
}
public Token nextPPToken() {
fTokens= (Token) fTokens.getNext();
return currentLexerToken();
}
public boolean expandsMacros() {
return false;
}
}
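
The context never hands out null: once the list is exhausted, currentLexerToken() keeps answering the tEND_OF_INPUT sentinel. A self-contained sketch of that pattern (simplified node type; all names are illustrative stand-ins, not scanner API):

class ExpansionContextSketch {
    static final int END_OF_INPUT = -1;        // stands in for Lexer.tEND_OF_INPUT

    static class Node {                        // stands in for Token
        final int type; final Node next;
        Node(int type, Node next) { this.type = type; this.next = next; }
    }

    private Node current;                      // stands in for fTokens

    ExpansionContextSketch(Node first) { current = first; }

    int currentType() { return current == null ? END_OF_INPUT : current.type; }
    int nextType()    { current = current.next; return currentType(); }

    public static void main(String[] args) {
        ExpansionContextSketch ctx = new ExpansionContextSketch(new Node(1, new Node(2, null)));
        for (int t = ctx.currentType(); t != END_OF_INPUT; t = ctx.nextType()) {
            System.out.println(t);             // 1, then 2; the sentinel terminates the loop
        }
    }
}
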

View file

@ -36,13 +36,14 @@ public final class ScannerContextPPDirective extends ScannerContext {
public ScannerContextPPDirective(Lexer lexer, boolean convertDefinedToken) { public ScannerContextPPDirective(Lexer lexer, boolean convertDefinedToken) {
super(null, null); super(null, null);
fLexer= lexer; fLexer= lexer;
fConvertDefinedToken= convertDefinedToken;
final Token currentToken = lexer.currentToken(); final Token currentToken = lexer.currentToken();
fLastEndOffset= currentToken.getOffset(); fLastEndOffset= currentToken.getOffset();
fToken= convertToken(currentToken); fToken= convertToken(currentToken);
fConvertDefinedToken= convertDefinedToken;
} }
public Token currentPPToken() { public Token currentLexerToken() {
return fToken; return fToken;
} }
@ -69,10 +70,10 @@ public final class ScannerContextPPDirective extends ScannerContext {
private Token convertToken(Token t) { private Token convertToken(Token t) {
switch (t.getType()) { switch (t.getType()) {
case Lexer.tNEWLINE: case Lexer.tNEWLINE:
t= new SimpleToken(Lexer.tEND_OF_INPUT, fToken.getEndOffset(), fToken.getEndOffset()); t= new SimpleToken(Lexer.tEND_OF_INPUT, t.getEndOffset(), t.getEndOffset());
break; break;
case IToken.tIDENTIFIER: case IToken.tIDENTIFIER:
if (fConvertDefinedToken && CharArrayUtils.equals(Keywords.cDEFINED, fToken.getCharImage())) { if (fConvertDefinedToken && CharArrayUtils.equals(Keywords.cDEFINED, t.getCharImage())) {
t.setType(CPreprocessor.tDEFINED); t.setType(CPreprocessor.tDEFINED);
fPreventMacroExpansion= STATE_DEFINED; fPreventMacroExpansion= STATE_DEFINED;
} }
@ -109,10 +110,6 @@ public final class ScannerContextPPDirective extends ScannerContext {
return fPreventMacroExpansion == 0; return fPreventMacroExpansion == 0;
} }
public void setInsideIncludeDirective() {
fLexer.setInsideIncludeDirective();
}
public int getLastEndOffset() { public int getLastEndOffset() {
return fLastEndOffset; return fLastEndOffset;
} }

View file

@ -17,11 +17,10 @@ import org.eclipse.cdt.core.parser.IToken;
* them on to the parsers. * them on to the parsers.
* @since 5.0 * @since 5.0
*/ */
public abstract class Token implements IToken { public abstract class Token implements IToken, Cloneable {
private int fKind; private int fKind;
private int fOffset; private int fOffset;
private int fEndOffset; private int fEndOffset;
private IToken fNextToken; private IToken fNextToken;
Token(int kind, int offset, int endOffset) { Token(int kind, int offset, int endOffset) {
@ -59,9 +58,18 @@ public abstract class Token implements IToken {
fNextToken= t; fNextToken= t;
} }
public void setOffset(int offset, int endOffset) {
fOffset= offset;
fEndOffset= endOffset;
}
public abstract char[] getCharImage(); public abstract char[] getCharImage();
public String toString() {
return getImage();
}
public boolean isOperator() { public boolean isOperator() {
return TokenUtil.isOperator(fKind); return TokenUtil.isOperator(fKind);
} }
@ -70,29 +78,36 @@ public abstract class Token implements IToken {
return new String(getCharImage()); return new String(getCharImage());
} }
public Object clone() {
try {
return super.clone();
} catch (CloneNotSupportedException e) {
return null;
}
}
public char[] getFilename() { public char[] getFilename() {
// mstodo // mstodo- parser removal
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
public boolean looksLikeExpression() { public boolean looksLikeExpression() {
// mstodo // mstodo- parser removal
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
public boolean canBeAPrefix() { public boolean canBeAPrefix() {
// mstodo // mstodo- parser removal
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
public int getLineNumber() { public int getLineNumber() {
// mstodo // mstodo- parser removal
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
public boolean isPointer() { public boolean isPointer() {
// mstodo // mstodo- parser removal
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
} }
@ -107,6 +122,23 @@ class SimpleToken extends Token {
} }
} }
class PlaceHolderToken extends ImageToken {
private final int fIndex;
public PlaceHolderToken(int type, int idx, int offset, int endOffset, char[] name) {
super(type, offset, endOffset, name);
fIndex= idx;
}
public int getIndex() {
return fIndex;
}
public String toString() {
return "[" + fIndex + "]"; //$NON-NLS-1$ //$NON-NLS-2$
}
}
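
A PlaceHolderToken presumably stands for a macro parameter inside the stored replacement list, identified by its index in the parameter placeholder list (which is why toString() renders it as "[i]"). A self-contained lookup sketch in that spirit (illustrative, not the actual definition-parser code):

class PlaceholderSketch {
    // Maps a parameter name to its index, which a placeholder token would then carry.
    static int indexOf(char[][] params, char[] image) {
        for (int i = 0; i < params.length; i++) {
            if (java.util.Arrays.equals(params[i], image)) {
                return i;
            }
        }
        return -1;
    }

    public static void main(String[] args) {
        char[][] params = { "a".toCharArray(), "b".toCharArray() };
        System.out.println(indexOf(params, "b".toCharArray())); // 1, rendered as "[1]" by PlaceHolderToken.toString()
    }
}
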
class DigraphToken extends Token { class DigraphToken extends Token {
public DigraphToken(int kind, int offset, int endOffset) { public DigraphToken(int kind, int offset, int endOffset) {
super(kind, offset, endOffset); super(kind, offset, endOffset);
@ -148,6 +180,11 @@ class SourceImageToken extends Token {
} }
return fImage; return fImage;
} }
public void setOffset(int offset, int endOffset) {
getCharImage();
super.setOffset(offset, endOffset);
}
} }
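
The setOffset() override above exists because a SourceImageToken cuts its spelling lazily out of the source buffer; once adjustOffsets() redirects the offsets into the expansion, the original slice would be lost. A self-contained sketch of that idea (field and class names are illustrative):

class LazyImageSketch {
    private final char[] source;
    private int offset, endOffset;
    private char[] image;

    LazyImageSketch(char[] source, int offset, int endOffset) {
        this.source = source; this.offset = offset; this.endOffset = endOffset;
    }

    char[] getCharImage() {
        if (image == null) {                       // cut lazily from the source buffer
            image = new char[endOffset - offset];
            System.arraycopy(source, offset, image, 0, image.length);
        }
        return image;
    }

    void setOffset(int offset, int endOffset) {
        getCharImage();                            // capture the spelling before re-pointing the offsets
        this.offset = offset; this.endOffset = endOffset;
    }

    public static void main(String[] args) {
        LazyImageSketch t = new LazyImageSketch("int x;".toCharArray(), 4, 5);
        t.setOffset(0, 1);
        System.out.println(t.getCharImage());      // prints x, the original spelling
    }
}
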

View file

@ -0,0 +1,85 @@
/*******************************************************************************
* Copyright (c) 2007 Wind River Systems, Inc. and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Markus Schorn - initial API and implementation
*******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;
class TokenList {
private Token fFirst;
private Token fLast;
final Token removeFirst() {
final Token first= fFirst;
if (first == fLast) {
fFirst= fLast= null;
return first;
}
else {
fFirst= (Token) first.getNext();
return first;
}
}
final public void append(Token t) {
if (fFirst == null) {
fFirst= fLast= t;
}
else {
fLast.setNext(t);
fLast= t;
}
t.setNext(null);
}
final public void prepend(TokenList prepend) {
final Token first= prepend.fFirst;
if (first != null) {
final Token last= prepend.fLast;
last.setNext(fFirst);
fFirst= first;
if (fLast == null) {
fLast= last;
}
}
}
final public TokenList cloneTokens() {
TokenList result= new TokenList();
for (Token t= fFirst; t != null; t= (Token) t.getNext()) {
result.append((Token) t.clone());
}
return result;
}
final public Token first() {
return fFirst;
}
// Unlinks the token following l; with l == null the first token of the list is removed.
public void removeBehind(Token l) {
if (l == null) {
Token t= fFirst;
if (t != null) {
t= (Token) t.getNext();
fFirst= t;
if (t == null) {
fLast= null;
}
}
}
else {
final Token r= (Token) l.getNext();
if (r != null) {
l.setNext(r.getNext());
if (r == fLast) {
fLast= l;
}
}
}
}
}
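
TokenList is a plain singly linked list threaded through Token.setNext(): prepend() splices a whole list in front, and removeBehind() unlinks a successor (or the head, for null). The same semantics with java.util.LinkedList as a stand-in:

import java.util.Arrays;
import java.util.LinkedList;

class TokenListSketch {
    public static void main(String[] args) {
        LinkedList<String> result = new LinkedList<String>(Arrays.asList("c", "d"));
        result.addAll(0, Arrays.asList("a", "b"));   // prepend(): a b c d
        result.removeFirst();                        // removeBehind(null): drops the head -> b c d
        result.remove(1);                            // removeBehind(first token): unlinks its successor -> b d
        System.out.println(result);                  // [b, d]
    }
}
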

View file

@ -118,7 +118,6 @@ public class TokenUtil {
case IToken.tDOT: return Keywords.cpDOT; case IToken.tDOT: return Keywords.cpDOT;
case IToken.tDIVASSIGN: return Keywords.cpDIVASSIGN; case IToken.tDIVASSIGN: return Keywords.cpDIVASSIGN;
case IToken.tDIV: return Keywords.cpDIV; case IToken.tDIV: return Keywords.cpDIV;
case IToken.tBACKSLASH: return Keywords.cpBACKSLASH;
case IGCCToken.tMIN: return Keywords.cpMIN; case IGCCToken.tMIN: return Keywords.cpMIN;
case IGCCToken.tMAX: return Keywords.cpMAX; case IGCCToken.tMAX: return Keywords.cpMAX;

View file

@ -69,4 +69,12 @@ public class MacroBinding extends PlatformObject implements IMacroBinding {
} }
return null; return null;
} }
public char[] getExpansionImage() {
throw new UnsupportedOperationException();
}
public char[][] getParameterPlaceholderList() {
throw new UnsupportedOperationException();
}
} }