Mirror of https://github.com/eclipse-cdt/cdt, synced 2025-04-29 19:45:01 +02:00.

Related to 205272, preprocessor and location map without macro-expansion. Not yet tested.

This commit is contained in:
parent f80b1e7e37
commit d4e9478c8e

24 changed files with 4906 additions and 140 deletions
@ -44,6 +44,7 @@ public class LexerTests extends BaseTestCase {
    private void init(String input) throws Exception {
        fLog.clear();
        fLexer= new Lexer(input.toCharArray(), new LexerOptions(), fLog);
        fLog.setInput(input);
        fLexer.nextToken();
        fLastEndOffset= 0;
    }

@ -68,7 +69,7 @@ public class LexerTests extends BaseTestCase {
        assertEquals(fLastEndOffset, t.getOffset());
        fLastEndOffset= t.getEndOffset();
        if (image != null) {
            assertEquals(image, new String(t.getTokenImage()));
            assertEquals(image, new String(t.getCharImage()));
        }
        fLexer.nextToken();
    }

@ -445,7 +446,7 @@ public class LexerTests extends BaseTestCase {
        init(instertLineSplices(input, splices));
        for (int i = 0; i < tokens.length; i++) {
            Token token= fLexer.currentToken();
            buf.append(token.getTokenImage());
            buf.append(token.getCharImage());
            token(tokens[i]);
        }
        eof();
@ -18,13 +18,18 @@ public class TestLexerLog implements ILexerLog {

    private ArrayList fComments= new ArrayList();
    private ArrayList fProblems= new ArrayList();
    private String fInput;

    public void handleComment(boolean isBlockComment, char[] source, int offset, int endOffset) {
        fComments.add(new String(source, offset, endOffset-offset));
    public void setInput(String input) {
        fInput= input;
    }

    public void handleProblem(int problemID, char[] source, int offset, int endOffset) {
        fProblems.add(createString(problemID, new String(source, offset, endOffset-offset)));
    public void handleComment(boolean isBlockComment, int offset, int endOffset) {
        fComments.add(fInput.substring(offset, endOffset));
    }

    public void handleProblem(int problemID, char[] arg, int offset, int endOffset) {
        fProblems.add(createString(problemID, new String(arg)));
    }

    public String createString(int problemID, String image) {
@ -0,0 +1,120 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ILinkage;
|
||||
import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTCompletionContext;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
|
||||
import org.eclipse.cdt.core.dom.ast.IBinding;
|
||||
import org.eclipse.cdt.internal.core.dom.Linkage;
|
||||
|
||||
/**
|
||||
* Models IASTNames as needed for the preprocessor statements and macro expansions.
|
||||
* @since 5.0
|
||||
*/
|
||||
class ASTPreprocessorName extends ASTPreprocessorNode implements IASTName {
|
||||
private final char[] fName;
|
||||
private final IBinding fBinding;
|
||||
public ASTPreprocessorName(IASTNode parent, ASTNodeProperty property, int startNumber, int endNumber, char[] name, IBinding binding) {
|
||||
super(parent, property, startNumber, endNumber);
|
||||
fName= name;
|
||||
fBinding= binding;
|
||||
}
|
||||
|
||||
public IBinding resolveBinding() {
|
||||
return fBinding;
|
||||
}
|
||||
public IBinding getBinding() {
|
||||
return fBinding;
|
||||
}
|
||||
public ILinkage getLinkage() {
|
||||
return Linkage.NO_LINKAGE;
|
||||
}
|
||||
public IASTCompletionContext getCompletionContext() {
|
||||
return null;
|
||||
}
|
||||
public boolean isDeclaration() {
|
||||
return false;
|
||||
}
|
||||
public boolean isDefinition() {
|
||||
return false;
|
||||
}
|
||||
public boolean isReference() {
|
||||
return false;
|
||||
}
|
||||
public char[] toCharArray() {
|
||||
return fName;
|
||||
}
|
||||
public String toString() {
|
||||
return new String(fName);
|
||||
}
|
||||
public void setBinding(IBinding binding) {assert false;}
|
||||
}
|
||||
|
||||
class ASTBuiltinName extends ASTPreprocessorName {
|
||||
final private String fFilename;
|
||||
|
||||
public ASTBuiltinName(IASTNode parent, ASTNodeProperty property, String filename, int startNumber, int endNumber, char[] name, IBinding binding) {
|
||||
super(parent, property, startNumber, endNumber, name, binding);
|
||||
fFilename= filename;
|
||||
}
|
||||
|
||||
public boolean contains(IASTNode node) {
|
||||
return node==this;
|
||||
}
|
||||
|
||||
public String getContainingFilename() {
|
||||
return fFilename;
|
||||
}
|
||||
|
||||
public IASTFileLocation getFileLocation() {
|
||||
// mstodo Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
public IASTNodeLocation[] getNodeLocations() {
|
||||
// mstodo Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
public int getOffset() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public String getRawSignature() {
|
||||
return toString();
|
||||
}
|
||||
}
|
||||
|
||||
class ASTMacroReferenceName extends ASTPreprocessorName {
|
||||
public ASTMacroReferenceName(IASTNode parent, IPreprocessorMacro macro, ImageLocationInfo imgLocationInfo) {
|
||||
super(parent, IASTTranslationUnit.EXPANSION_NAME, 0, 0, macro.getNameCharArray(), macro);
|
||||
}
|
||||
|
||||
public String getContainingFilename() {
|
||||
return getTranslationUnit().getContainingFilename();
|
||||
}
|
||||
|
||||
public String getRawSignature() {
|
||||
return toString();
|
||||
}
|
||||
|
||||
public boolean isReference() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// mstodo once names support image-locations, return correct ones here.
|
||||
}
|
|
@ -0,0 +1,374 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTComment;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFunctionStyleMacroParameter;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorElifStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorElseStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorEndifStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorErrorStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorFunctionStyleMacroDefinition;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIfStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIfdefStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIfndefStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIncludeStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorPragmaStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorUndefStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
|
||||
import org.eclipse.cdt.core.dom.ast.IMacroBinding;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit.IDependencyTree;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit.IDependencyTree.IASTInclusionNode;
|
||||
import org.eclipse.cdt.internal.core.dom.parser.ASTNode;
|
||||
|
||||
/**
|
||||
* Models various AST-constructs obtained from the preprocessor.
|
||||
* @since 5.0
|
||||
*/
|
||||
abstract class ASTPreprocessorNode extends ASTNode {
|
||||
public ASTPreprocessorNode(IASTNode parent, ASTNodeProperty property, int startNumber, int endNumber) {
|
||||
setParent(parent);
|
||||
setPropertyInParent(property);
|
||||
setOffset(startNumber);
|
||||
setLength(endNumber-startNumber);
|
||||
}
|
||||
|
||||
protected String getSource(int offset, int length) {
|
||||
final IASTTranslationUnit tu= getTranslationUnit();
|
||||
IASTNodeLocation[] loc=tu.getLocationInfo(offset, length);
|
||||
return tu.getUnpreprocessedSignature(loc);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
class ASTComment extends ASTPreprocessorNode implements IASTComment {
|
||||
private final boolean fIsBlockComment;
|
||||
public ASTComment(IASTTranslationUnit parent, int startNumber, int endNumber, boolean isBlockComment) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fIsBlockComment= isBlockComment;
|
||||
}
|
||||
|
||||
public char[] getComment() {
|
||||
return getSource(getOffset(), getLength()).toCharArray();
|
||||
}
|
||||
|
||||
public boolean isBlockComment() {
|
||||
return fIsBlockComment;
|
||||
}
|
||||
|
||||
public void setComment(char[] comment) {
|
||||
assert false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
abstract class ASTDirectiveWithCondition extends ASTPreprocessorNode {
|
||||
private final int fConditionOffset;
|
||||
private final int fConditionLength;
|
||||
private final boolean fActive;
|
||||
public ASTDirectiveWithCondition(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fConditionOffset= condNumber;
|
||||
fConditionLength= condEndNumber-condNumber;
|
||||
fActive= active;
|
||||
}
|
||||
|
||||
public boolean taken() {
|
||||
return fActive;
|
||||
}
|
||||
|
||||
public String getConditionString() {
|
||||
return getSource(fConditionOffset, fConditionLength);
|
||||
}
|
||||
|
||||
public char[] getCondition() {
|
||||
return getConditionString().toCharArray();
|
||||
}
|
||||
}
|
||||
|
||||
class ASTEndif extends ASTPreprocessorNode implements IASTPreprocessorEndifStatement {
|
||||
public ASTEndif(IASTTranslationUnit parent, int startNumber, int endNumber) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
}
|
||||
}
|
||||
|
||||
class ASTElif extends ASTDirectiveWithCondition implements IASTPreprocessorElifStatement {
|
||||
public ASTElif(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
|
||||
super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
|
||||
}
|
||||
}
|
||||
|
||||
class ASTElse extends ASTPreprocessorNode implements IASTPreprocessorElseStatement {
|
||||
private final boolean fActive;
|
||||
public ASTElse(IASTTranslationUnit parent, int startNumber, int endNumber, boolean active) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fActive= active;
|
||||
}
|
||||
public boolean taken() {
|
||||
return fActive;
|
||||
}
|
||||
}
|
||||
|
||||
class ASTIfndef extends ASTDirectiveWithCondition implements IASTPreprocessorIfndefStatement {
|
||||
public ASTIfndef(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
|
||||
super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
|
||||
}
|
||||
}
|
||||
|
||||
class ASTIfdef extends ASTDirectiveWithCondition implements IASTPreprocessorIfdefStatement {
|
||||
public ASTIfdef(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
|
||||
super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
|
||||
}
|
||||
}
|
||||
|
||||
class ASTIf extends ASTDirectiveWithCondition implements IASTPreprocessorIfStatement {
|
||||
public ASTIf(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber, boolean active) {
|
||||
super(parent, startNumber, condNumber, condEndNumber, endNumber, active);
|
||||
}
|
||||
}
|
||||
|
||||
class ASTError extends ASTDirectiveWithCondition implements IASTPreprocessorErrorStatement {
|
||||
public ASTError(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber) {
|
||||
super(parent, startNumber, condNumber, condEndNumber, endNumber, true);
|
||||
}
|
||||
|
||||
public char[] getMessage() {
|
||||
return getCondition();
|
||||
}
|
||||
}
|
||||
|
||||
class ASTPragma extends ASTDirectiveWithCondition implements IASTPreprocessorPragmaStatement {
|
||||
public ASTPragma(IASTTranslationUnit parent, int startNumber, int condNumber, int condEndNumber, int endNumber) {
|
||||
super(parent, startNumber, condNumber, condEndNumber, endNumber, true);
|
||||
}
|
||||
|
||||
public char[] getMessage() {
|
||||
return getCondition();
|
||||
}
|
||||
}
|
||||
|
||||
class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreprocessorIncludeStatement {
|
||||
private final IASTName fName;
|
||||
private final String fPath;
|
||||
private final boolean fIsActive;
|
||||
private final boolean fIsResolved;
|
||||
private final boolean fIsSystemInclude;
|
||||
|
||||
public ASTInclusionStatement(IASTTranslationUnit parent, int startNumber, int nameStartNumber, int nameEndNumber, int endNumber,
|
||||
char[] headerName, String filePath, boolean userInclude, boolean active) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fName= new ASTPreprocessorName(this, IASTPreprocessorIncludeStatement.INCLUDE_NAME, nameStartNumber, nameEndNumber, headerName, null);
|
||||
fPath= filePath == null ? "" : filePath; //$NON-NLS-1$
|
||||
fIsActive= active;
|
||||
fIsResolved= filePath != null;
|
||||
fIsSystemInclude= !userInclude;
|
||||
}
|
||||
|
||||
public IASTName getName() {
|
||||
return fName;
|
||||
}
|
||||
|
||||
public String getPath() {
|
||||
return fPath;
|
||||
}
|
||||
|
||||
public boolean isActive() {
|
||||
return fIsActive;
|
||||
}
|
||||
|
||||
public boolean isResolved() {
|
||||
return fIsResolved;
|
||||
}
|
||||
|
||||
public boolean isSystemInclude() {
|
||||
return fIsSystemInclude;
|
||||
}
|
||||
}
|
||||
|
||||
class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorMacroDefinition {
|
||||
private final IASTName fName;
|
||||
|
||||
/**
|
||||
* Regular constructor.
|
||||
*/
|
||||
public ASTMacro(IASTTranslationUnit parent, IMacroBinding macro,
|
||||
int startNumber, int nameNumber, int nameEndNumber, int expansionNumber, int endNumber) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fName= new ASTPreprocessorName(this, IASTPreprocessorMacroDefinition.MACRO_NAME, nameNumber, nameEndNumber, macro.getNameCharArray(), macro);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for built-in macros
|
||||
*/
|
||||
public ASTMacro(IASTTranslationUnit parent, IMacroBinding macro, String filename, int nameNumber, int nameEndNumber) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, 0, 0);
|
||||
fName= new ASTBuiltinName(this, IASTPreprocessorMacroDefinition.MACRO_NAME, filename, nameNumber, nameEndNumber, macro.getNameCharArray(), macro);
|
||||
}
|
||||
|
||||
protected IMacroBinding getMacro() {
|
||||
return (IMacroBinding) fName.getBinding();
|
||||
}
|
||||
|
||||
public String getExpansion() {
|
||||
return new String(getMacro().getExpansion());
|
||||
}
|
||||
|
||||
public IASTName getName() {
|
||||
return fName;
|
||||
}
|
||||
|
||||
public int getRoleForName(IASTName n) {
|
||||
return (fName == n) ? r_definition : r_unclear;
|
||||
}
|
||||
|
||||
public void setExpansion(String exp) {assert false;}
|
||||
public void setName(IASTName name) {assert false;}
|
||||
}
|
||||
|
||||
class ASTMacroParameter extends ASTNode implements IASTFunctionStyleMacroParameter {
|
||||
private final String fParameter;
|
||||
|
||||
public ASTMacroParameter(char[] param) {
|
||||
fParameter= new String(param);
|
||||
}
|
||||
|
||||
public String getParameter() {
|
||||
return fParameter;
|
||||
}
|
||||
|
||||
public void setParameter(String value) {assert false;}
|
||||
}
|
||||
|
||||
class ASTFunctionMacro extends ASTMacro implements IASTPreprocessorFunctionStyleMacroDefinition {
|
||||
/**
|
||||
* Regular constructor.
|
||||
*/
|
||||
public ASTFunctionMacro(IASTTranslationUnit parent, IMacroBinding macro,
|
||||
int startNumber, int nameNumber, int nameEndNumber, int expansionNumber, int endNumber) {
|
||||
super(parent, macro, startNumber, nameNumber, nameEndNumber, expansionNumber, endNumber);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for builtins
|
||||
*/
|
||||
public ASTFunctionMacro(IASTTranslationUnit parent, IMacroBinding macro,
|
||||
String filename, int nameNumber, int nameEndNumber) {
|
||||
super(parent, macro, filename, nameNumber, nameEndNumber);
|
||||
}
|
||||
|
||||
public IASTFunctionStyleMacroParameter[] getParameters() {
|
||||
FunctionStyleMacro macro= (FunctionStyleMacro) getMacro();
|
||||
char[][] paramList= macro.getParamList();
|
||||
IASTFunctionStyleMacroParameter[] result= new IASTFunctionStyleMacroParameter[paramList.length];
|
||||
for (int i = 0; i < result.length; i++) {
|
||||
result[i]= new ASTMacroParameter(paramList[i]);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public void addParameter(IASTFunctionStyleMacroParameter parm) {assert false;}
|
||||
}
|
||||
|
||||
|
||||
class ASTUndef extends ASTPreprocessorNode implements IASTPreprocessorUndefStatement {
|
||||
private final IASTName fName;
|
||||
public ASTUndef(IASTTranslationUnit parent, char[] name, int startNumber, int nameNumber, int nameEndNumber, int endNumber) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fName= new ASTPreprocessorName(this, IASTPreprocessorUndefStatement.MACRO_NAME, nameNumber, nameEndNumber, name, null);
|
||||
}
|
||||
|
||||
public IASTName getMacroName() {
|
||||
return fName;
|
||||
}
|
||||
}
|
||||
|
||||
class ASTInclusionNode implements IASTInclusionNode {
|
||||
protected LocationCtx fLocationCtx;
|
||||
private IASTInclusionNode[] fInclusions;
|
||||
|
||||
public ASTInclusionNode(LocationCtx ctx) {
|
||||
fLocationCtx= ctx;
|
||||
}
|
||||
|
||||
public IASTPreprocessorIncludeStatement getIncludeDirective() {
|
||||
return fLocationCtx.getInclusionStatement();
|
||||
}
|
||||
|
||||
public IASTInclusionNode[] getNestedInclusions() {
|
||||
if (fInclusions == null) {
|
||||
ArrayList result= new ArrayList();
|
||||
fLocationCtx.getInclusions(result);
|
||||
fInclusions= (IASTInclusionNode[]) result.toArray(new IASTInclusionNode[result.size()]);
|
||||
}
|
||||
return fInclusions;
|
||||
}
|
||||
}
|
||||
|
||||
class DependencyTree extends ASTInclusionNode implements IDependencyTree {
|
||||
public DependencyTree(LocationCtx ctx) {
|
||||
super(ctx);
|
||||
}
|
||||
|
||||
public IASTInclusionNode[] getInclusions() {
|
||||
return getNestedInclusions();
|
||||
}
|
||||
|
||||
public String getTranslationUnitPath() {
|
||||
return fLocationCtx.getFilename();
|
||||
}
|
||||
}
|
||||
|
||||
class ASTFileLocation implements IASTFileLocation {
|
||||
private String fFilename;
|
||||
private int fOffset;
|
||||
private int fLength;
|
||||
|
||||
public ASTFileLocation(String filename, int startOffset, int length) {
|
||||
fFilename= filename;
|
||||
fOffset= startOffset;
|
||||
fLength= length;
|
||||
}
|
||||
|
||||
public String getFileName() {
|
||||
return fFilename;
|
||||
}
|
||||
|
||||
public IASTFileLocation asFileLocation() {
|
||||
return this;
|
||||
}
|
||||
|
||||
public int getNodeLength() {
|
||||
return fLength;
|
||||
}
|
||||
|
||||
public int getNodeOffset() {
|
||||
return fOffset;
|
||||
}
|
||||
|
||||
public int getEndingLineNumber() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public int getStartingLineNumber() {
|
||||
return 0;
|
||||
}
|
||||
}
@ -0,0 +1,235 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2004, 2007 IBM Corporation and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* IBM - Initial API and implementation
|
||||
* Anton Leherbauer (Wind River Systems)
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTProblem;
|
||||
import org.eclipse.cdt.internal.core.dom.parser.ASTNode;
|
||||
import org.eclipse.cdt.internal.core.parser.ParserMessages;
|
||||
|
||||
|
||||
/**
|
||||
* Models the problems found by the preprocessor or lexer.
|
||||
*/
|
||||
class ASTProblem extends ASTNode implements IASTProblem {
|
||||
|
||||
private final int id;
|
||||
private final char[] arg;
|
||||
|
||||
private String message = null;
|
||||
|
||||
public ASTProblem(int id, char[] arg, int startNumber, int endNumber) {
|
||||
this.id = id;
|
||||
this.arg = arg;
|
||||
setOffsetAndLength(startNumber, endNumber-startNumber);
|
||||
}
|
||||
|
||||
public int getID() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public boolean isError() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean isWarning() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
if (message != null)
|
||||
return message;
|
||||
|
||||
String msg = (String) errorMessages.get(new Integer(id));
|
||||
if (msg == null)
|
||||
msg = ""; //$NON-NLS-1$
|
||||
|
||||
if (arg != null) {
|
||||
msg = MessageFormat.format(msg, new Object[] { new String(arg) });
|
||||
}
|
||||
|
||||
IASTFileLocation f = getFileLocation();
|
||||
String file = null;
|
||||
int line = 0;
|
||||
if( f == null )
|
||||
{
|
||||
file = ""; //$NON-NLS-1$
|
||||
} else {
|
||||
file = f.getFileName();
|
||||
line = f.getStartingLineNumber();
|
||||
}
|
||||
Object[] args = new Object[] { msg, file, new Integer(line) };
|
||||
message = ParserMessages.getFormattedString(PROBLEM_PATTERN, args);
|
||||
return message;
|
||||
}
|
||||
|
||||
public boolean checkCategory(int bitmask) {
|
||||
return ((id & bitmask) != 0);
|
||||
}
|
||||
|
||||
public String getArguments() {
|
||||
return arg != null ? String.valueOf(arg) : ""; //$NON-NLS-1$
|
||||
}
|
||||
|
||||
|
||||
protected static final Map errorMessages;
|
||||
static {
|
||||
errorMessages = new HashMap();
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_POUND_ERROR),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.error")); //$NON-NLS-1$
|
||||
errorMessages.put(new Integer(IASTProblem.PREPROCESSOR_POUND_WARNING), ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.warning")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_INCLUSION_NOT_FOUND),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.inclusionNotFound")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_DEFINITION_NOT_FOUND),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.definitionNotFound")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_INVALID_MACRO_DEFN),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.invalidMacroDefn")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_INVALID_MACRO_REDEFN),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.invalidMacroRedefn")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_UNBALANCE_CONDITION),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.unbalancedConditional")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(
|
||||
IASTProblem.PREPROCESSOR_CONDITIONAL_EVAL_ERROR),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.conditionalEval")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_MACRO_USAGE_ERROR),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.macroUsage")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_CIRCULAR_INCLUSION),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.circularInclusion")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_INVALID_DIRECTIVE),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.invalidDirective")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_MACRO_PASTING_ERROR),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.macroPasting")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(
|
||||
IASTProblem.PREPROCESSOR_MISSING_RPAREN_PARMLIST),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.missingRParen")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.PREPROCESSOR_INVALID_VA_ARGS),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.preproc.invalidVaArgs")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_INVALID_ESCAPECHAR),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.invalidEscapeChar")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_UNBOUNDED_STRING),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.unboundedString")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_BAD_FLOATING_POINT),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.badFloatingPoint")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_BAD_HEX_FORMAT),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.badHexFormat")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_BAD_OCTAL_FORMAT),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.badOctalFormat")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_BAD_DECIMAL_FORMAT),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.badDecimalFormat")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_ASSIGNMENT_NOT_ALLOWED),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.assignmentNotAllowed")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_DIVIDE_BY_ZERO),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.divideByZero")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_MISSING_R_PAREN),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.missingRParen")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_EXPRESSION_SYNTAX_ERROR),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.expressionSyntaxError")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_ILLEGAL_IDENTIFIER),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.illegalIdentifier")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_BAD_CONDITIONAL_EXPRESSION),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.badConditionalExpression")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_UNEXPECTED_EOF),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.unexpectedEOF")); //$NON-NLS-1$
|
||||
errorMessages
|
||||
.put(
|
||||
new Integer(IASTProblem.SCANNER_BAD_CHARACTER),
|
||||
ParserMessages
|
||||
.getString("ScannerProblemFactory.error.scanner.badCharacter")); //$NON-NLS-1$
|
||||
errorMessages.put(new Integer(IASTProblem.SYNTAX_ERROR), ParserMessages
|
||||
.getString("ParserProblemFactory.error.syntax.syntaxError")); //$NON-NLS-1$
|
||||
}
|
||||
|
||||
protected final static String PROBLEM_PATTERN = "BaseProblemFactory.problemPattern"; //$NON-NLS-1$
|
||||
}
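The getMessage() implementation above substitutes the problem argument into an externalized template and then wraps the result with file and line information via ParserMessages. A rough standalone sketch of that two-step formatting, with a made-up pattern standing in for the real externalized string and an invented class name:

// Illustrative only: the real templates come from ParserMessages; the pattern
// below is a stand-in, not the actual externalized resource.
import java.text.MessageFormat;

class ProblemMessageSketch {
    static String format(String template, String arg, String file, int line) {
        // step 1: substitute the problem argument into the message template
        String msg= MessageFormat.format(template, new Object[] { arg });
        // step 2: wrap the message with location information
        return MessageFormat.format("{0} in file: {1}, line: {2}",
                new Object[] { msg, file, new Integer(line) });
    }
}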
File diff suppressed because it is too large
@ -0,0 +1,375 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2004, 2007 IBM Corporation and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* IBM Corporation - initial implementation
|
||||
* Markus Schorn (Wind River Systems)
|
||||
* Bryan Wilkinson (QNX) - https://bugs.eclipse.org/bugs/show_bug.cgi?id=151207
|
||||
* Anton Leherbauer (Wind River Systems)
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.parser.IProblem;
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayObjectMap;
|
||||
|
||||
/**
|
||||
* Used to evaluate expressions in preprocessor directives.
|
||||
* @since 5.0
|
||||
*/
|
||||
class ExpressionEvaluator {
|
||||
static class EvalException extends Exception {
|
||||
private int fProblemID;
|
||||
private char[] fProblemArg;
|
||||
|
||||
public EvalException(int problemID, char[] problemArg) {
|
||||
fProblemID= problemID;
|
||||
}
|
||||
|
||||
public int getProblemID() {
|
||||
return fProblemID;
|
||||
}
|
||||
|
||||
public char[] getProblemArg() {
|
||||
return fProblemArg;
|
||||
}
|
||||
}
|
||||
|
||||
private Token fTokens;
|
||||
private CharArrayObjectMap fDictionary;
|
||||
|
||||
public boolean evaluate(Token condition, CharArrayObjectMap dictionary) throws EvalException {
|
||||
fTokens= condition;
|
||||
fDictionary= dictionary;
|
||||
return expression() != 0;
|
||||
}
|
||||
|
||||
|
||||
private long expression() throws EvalException {
|
||||
return conditionalExpression();
|
||||
}
|
||||
|
||||
private long conditionalExpression() throws EvalException {
|
||||
long r1 = logicalOrExpression();
|
||||
if (LA() == IToken.tQUESTION) {
|
||||
consume();
|
||||
long r2 = expression();
|
||||
if (LA() == IToken.tCOLON)
|
||||
consume();
|
||||
else {
|
||||
throw new EvalException(IProblem.SCANNER_BAD_CONDITIONAL_EXPRESSION, null);
|
||||
}
|
||||
long r3 = conditionalExpression();
|
||||
return r1 != 0 ? r2 : r3;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long logicalOrExpression() throws EvalException {
|
||||
long r1 = logicalAndExpression();
|
||||
while (LA() == IToken.tOR) {
|
||||
consume();
|
||||
long r2 = logicalAndExpression();
|
||||
r1 = ((r1 != 0) || (r2 != 0)) ? 1 : 0;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long logicalAndExpression() throws EvalException {
|
||||
long r1 = inclusiveOrExpression();
|
||||
while (LA() == IToken.tAND) {
|
||||
consume();
|
||||
long r2 = inclusiveOrExpression();
|
||||
r1 = ((r1 != 0) && (r2 != 0)) ? 1 : 0;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long inclusiveOrExpression() throws EvalException {
|
||||
long r1 = exclusiveOrExpression();
|
||||
while (LA() == IToken.tBITOR) {
|
||||
consume();
|
||||
long r2 = exclusiveOrExpression();
|
||||
r1 = r1 | r2;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long exclusiveOrExpression() throws EvalException {
|
||||
long r1 = andExpression();
|
||||
while (LA() == IToken.tXOR) {
|
||||
consume();
|
||||
long r2 = andExpression();
|
||||
r1 = r1 ^ r2;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long andExpression() throws EvalException {
|
||||
long r1 = equalityExpression();
|
||||
while (LA() == IToken.tAMPER) {
|
||||
consume();
|
||||
long r2 = equalityExpression();
|
||||
r1 = r1 & r2;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long equalityExpression() throws EvalException {
|
||||
long r1 = relationalExpression();
|
||||
for (int t = LA(); t == IToken.tEQUAL || t == IToken.tNOTEQUAL; t = LA()) {
|
||||
consume();
|
||||
long r2 = relationalExpression();
|
||||
if (t == IToken.tEQUAL)
|
||||
r1 = (r1 == r2) ? 1 : 0;
|
||||
else
|
||||
// t == tNOTEQUAL
|
||||
r1 = (r1 != r2) ? 1 : 0;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long relationalExpression() throws EvalException {
|
||||
long r1 = shiftExpression();
|
||||
for (int t = LA(); t == IToken.tLT || t == IToken.tLTEQUAL || t == IToken.tGT
|
||||
|| t == IToken.tGTEQUAL; t = LA()) {
|
||||
consume();
|
||||
long r2 = shiftExpression();
|
||||
switch (t) {
|
||||
case IToken.tLT:
|
||||
r1 = (r1 < r2) ? 1 : 0;
|
||||
break;
|
||||
case IToken.tLTEQUAL:
|
||||
r1 = (r1 <= r2) ? 1 : 0;
|
||||
break;
|
||||
case IToken.tGT:
|
||||
r1 = (r1 > r2) ? 1 : 0;
|
||||
break;
|
||||
case IToken.tGTEQUAL:
|
||||
r1 = (r1 >= r2) ? 1 : 0;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long shiftExpression() throws EvalException {
|
||||
long r1 = additiveExpression();
|
||||
for (int t = LA(); t == IToken.tSHIFTL || t == IToken.tSHIFTR; t = LA()) {
|
||||
consume();
|
||||
long r2 = additiveExpression();
|
||||
if (t == IToken.tSHIFTL)
|
||||
r1 = r1 << r2;
|
||||
else
|
||||
// t == tSHIFTR
|
||||
r1 = r1 >> r2;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long additiveExpression() throws EvalException {
|
||||
long r1 = multiplicativeExpression();
|
||||
for (int t = LA(); t == IToken.tPLUS || t == IToken.tMINUS; t = LA()) {
|
||||
consume();
|
||||
long r2 = multiplicativeExpression();
|
||||
if (t == IToken.tPLUS)
|
||||
r1 = r1 + r2;
|
||||
else
|
||||
// t == tMINUS
|
||||
r1 = r1 - r2;
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long multiplicativeExpression() throws EvalException {
|
||||
long r1 = unaryExpression();
|
||||
for (int t = LA(); t == IToken.tSTAR || t == IToken.tDIV || t == IToken.tMOD; t = LA()) {
|
||||
consume();
|
||||
long r2 = unaryExpression();
|
||||
if (t == IToken.tSTAR)
|
||||
r1 = r1 * r2;
|
||||
else if (r2 != 0) {
|
||||
if (t == IToken.tDIV)
|
||||
r1 = r1 / r2;
|
||||
else
|
||||
r1 = r1 % r2; //tMOD
|
||||
} else {
|
||||
throw new EvalException(IProblem.SCANNER_DIVIDE_BY_ZERO, null);
|
||||
}
|
||||
}
|
||||
return r1;
|
||||
}
|
||||
|
||||
private long unaryExpression() throws EvalException {
|
||||
switch (LA()) {
|
||||
case IToken.tPLUS:
|
||||
consume();
|
||||
return unaryExpression();
|
||||
case IToken.tMINUS:
|
||||
consume();
|
||||
return -unaryExpression();
|
||||
case IToken.tNOT:
|
||||
consume();
|
||||
return unaryExpression() == 0 ? 1 : 0;
|
||||
case IToken.tBITCOMPLEMENT:
|
||||
consume();
|
||||
return ~unaryExpression();
|
||||
case IToken.tCHAR:
|
||||
case IToken.tLCHAR:
|
||||
case IToken.tINTEGER:
|
||||
long val= getValue(fTokens);
|
||||
consume();
|
||||
return val;
|
||||
case CPreprocessor.tDEFINED:
|
||||
return handleDefined();
|
||||
case IToken.tLPAREN:
|
||||
consume();
|
||||
long r1 = expression();
|
||||
if (LA() == IToken.tRPAREN) {
|
||||
consume();
|
||||
return r1;
|
||||
}
|
||||
throw new EvalException(IProblem.SCANNER_MISSING_R_PAREN, null);
|
||||
case IToken.tIDENTIFIER:
|
||||
return 1;
|
||||
default:
|
||||
throw new EvalException(IProblem.SCANNER_EXPRESSION_SYNTAX_ERROR, null);
|
||||
}
|
||||
}
|
||||
|
||||
private long handleDefined() throws EvalException {
|
||||
boolean parenthesis= false;
|
||||
consume();
|
||||
if (LA() == IToken.tLPAREN) {
|
||||
parenthesis= true;
|
||||
consume();
|
||||
}
|
||||
if (LA() != IToken.tIDENTIFIER) {
|
||||
throw new EvalException(IProblem.SCANNER_ILLEGAL_IDENTIFIER, null);
|
||||
}
|
||||
int result= fDictionary.containsKey(fTokens.getCharImage()) ? 1 : 0;
|
||||
consume();
|
||||
if (parenthesis) {
|
||||
if (LA() != IToken.tRPAREN) {
|
||||
throw new EvalException(IProblem.SCANNER_MISSING_R_PAREN, null);
|
||||
}
|
||||
consume();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private int LA() {
|
||||
return fTokens.getType();
|
||||
}
|
||||
|
||||
private void consume() {
|
||||
fTokens= (Token) fTokens.getNext();
|
||||
if (fTokens == null) {
|
||||
fTokens= new SimpleToken(Lexer.tEND_OF_INPUT, 0, 0);
|
||||
}
|
||||
}
|
||||
|
||||
long getValue(Token t) throws EvalException {
|
||||
switch(t.getType()) {
|
||||
case IToken.tCHAR:
|
||||
return getChar(t.getCharImage(), 1);
|
||||
case IToken.tLCHAR:
|
||||
return getChar(t.getCharImage(), 2);
|
||||
case IToken.tINTEGER:
|
||||
return getNumber(t.getCharImage());
|
||||
}
|
||||
assert false;
|
||||
return 1;
|
||||
}
|
||||
|
||||
private long getNumber(char[] image) throws EvalException {
|
||||
boolean isHex = false;
|
||||
boolean isOctal = false;
|
||||
|
||||
int pos= 0;
|
||||
if (image.length > 1) {
|
||||
if (image[0] == '0') {
|
||||
switch (image[++pos]) {
|
||||
case 'x':
|
||||
case 'X':
|
||||
isHex = true;
|
||||
++pos;
|
||||
break;
|
||||
case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9':
|
||||
isOctal = true;
|
||||
++pos;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (isHex) {
|
||||
return getNumber(image, 2, image.length, 16, IProblem.SCANNER_BAD_HEX_FORMAT);
|
||||
}
|
||||
if (isOctal) {
|
||||
return getNumber(image, 1, image.length, 8, IProblem.SCANNER_BAD_OCTAL_FORMAT);
|
||||
}
|
||||
return getNumber(image, 0, image.length, 10, IProblem.SCANNER_BAD_DECIMAL_FORMAT);
|
||||
}
|
||||
|
||||
private long getChar(char[] tokenImage, int i) throws EvalException {
|
||||
if (i>=tokenImage.length) {
|
||||
throw new EvalException(IProblem.SCANNER_BAD_CHARACTER, tokenImage);
|
||||
}
|
||||
final char c= tokenImage[i];
|
||||
if (c != '\\') {
|
||||
return c;
|
||||
}
|
||||
|
||||
if (++i>=tokenImage.length) {
|
||||
throw new EvalException(IProblem.SCANNER_BAD_CHARACTER, tokenImage);
|
||||
}
|
||||
final char d= tokenImage[i];
|
||||
switch(d) {
|
||||
case '\\': case '"': case '\'':
|
||||
return d;
|
||||
case 'a': return 7;
|
||||
case 'b': return '\b';
|
||||
case 'f': return '\f';
|
||||
case 'n': return '\n';
|
||||
case 'r': return '\r';
|
||||
case 't': return '\t';
|
||||
case 'v': return 0xb;
|
||||
|
||||
case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7':
|
||||
return getNumber(tokenImage, i+1, tokenImage.length-1, 8, IProblem.SCANNER_BAD_OCTAL_FORMAT);
|
||||
|
||||
case 'x': case 'u': case 'U':
|
||||
return getNumber(tokenImage, i+1, tokenImage.length-1, 16, IProblem.SCANNER_BAD_HEX_FORMAT);
|
||||
default:
|
||||
throw new EvalException(IProblem.SCANNER_BAD_CHARACTER, tokenImage);
|
||||
}
|
||||
}
|
||||
|
||||
private long getNumber(char[] tokenImage, int from, int to, int base, int problemID) throws EvalException {
|
||||
long result= 0;
|
||||
for (int i = from; i < to; i++) {
|
||||
int digit= getDigit(tokenImage[i]);
|
||||
if (digit >= base) {
|
||||
throw new EvalException(problemID, tokenImage);
|
||||
}
|
||||
result= result*base + digit;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private int getDigit(char c) {
|
||||
switch(c) {
|
||||
case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9':
|
||||
return c-'0';
|
||||
case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
|
||||
return c-'a' + 10;
|
||||
case 'A': case 'B': case 'C': case 'D': case 'E': case 'F':
|
||||
return c-'A'+10;
|
||||
}
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
}
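For readers following the literal handling above: getNumber() picks the base from the 0x/0 prefix and then accumulates digits as result = result*base + digit. A minimal standalone sketch of that conversion, using an invented helper class that is not part of this commit:

// Illustrative sketch of the base selection and digit accumulation performed by
// ExpressionEvaluator.getNumber(); class and method names here are invented.
class NumberSketch {
    static long parse(char[] image) {
        int pos= 0;
        int base= 10;
        if (image.length > 1 && image[0] == '0') {
            if (image[1] == 'x' || image[1] == 'X') {
                base= 16; pos= 2;       // hexadecimal literal, skip "0x"
            } else {
                base= 8; pos= 1;        // octal literal, skip the leading "0"
            }
        }
        long result= 0;
        for (int i= pos; i < image.length; i++) {
            int digit= Character.digit(image[i], base);
            if (digit < 0) {
                throw new IllegalArgumentException("bad digit: " + image[i]);
            }
            result= result*base + digit;    // e.g. "0x1F" -> 1*16 + 15 = 31
        }
        return result;
    }
}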
@ -10,9 +10,29 @@
 *******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;

import org.eclipse.cdt.core.parser.IProblem;

/**
 * Interface between the lexer and the preprocessor for picking up warnings and comments.
 * @since 5.0
 */
public interface ILexerLog {

    void handleProblem(int problemID, char[] source, int offset, int endOffset);
    /**
     * A problem has been detected
     * @param problemID id as defined in {@link IProblem}
     * @param info additional info as required for {@link IProblem}.
     * @param offset The offset of the problem in the source of the lexer.
     * @param endOffset end offset of the problem in the source of the lexer.
     */
    void handleProblem(int problemID, char[] info, int offset, int endOffset);

    void handleComment(boolean isBlockComment, char[] source, int offset, int endOffsetLast);
    /**
     * A comment has been detected
     * @param isBlockComment <code>true</code> for block-comments, <code>false</code> for line-comments.
     * @param source the input of the lexer.
     * @param offset the offset where the comment starts
     * @param endOffset the offset where the comment ends
     */
    void handleComment(boolean isBlockComment, int offset, int endOffset);
}
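A minimal sketch of an ILexerLog sink, assuming the two new int-offset callbacks shown above are the ones that remain (the char[]-source variants are the lines being replaced by this hunk); the class name is invented for illustration:

// Illustrative ILexerLog implementation; not part of the commit.
class CollectingLexerLog implements ILexerLog {
    private final StringBuffer fLog= new StringBuffer();

    public void handleProblem(int problemID, char[] info, int offset, int endOffset) {
        fLog.append("problem ").append(problemID)
            .append(" at [").append(offset).append(',').append(endOffset).append(")\n");
    }

    public void handleComment(boolean isBlockComment, int offset, int endOffset) {
        fLog.append(isBlockComment ? "block" : "line")
            .append(" comment at [").append(offset).append(',').append(endOffset).append(")\n");
    }

    public String toString() {
        return fLog.toString();
    }
}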
@ -8,17 +8,14 @@
 * Contributors:
 *    Markus Schorn - initial API and implementation
 *******************************************************************************/

package org.eclipse.cdt.internal.core.parser.scanner;

class CompletionTokenException extends Exception {
/**
 * Interface for modeling contexts that can deal with offsets. These are:
 * synthetic contexts used for pre-included files, file-contexts, macro-expansions.
 * @since 5.0
 */
public interface ILocationCtx {

    private Token fToken;

    public CompletionTokenException(Token token) {
        fToken= token;
    }

    public Token getToken() {
        return fToken;
    }
}
@ -0,0 +1,85 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2004, 2007 IBM Corporation and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* IBM - Initial API and implementation
|
||||
* Markus Schorn (Wind River Systems)
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIncludeStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTProblem;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
|
||||
import org.eclipse.cdt.core.dom.ast.IMacroBinding;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit.IDependencyTree;
|
||||
|
||||
|
||||
/**
|
||||
* Interface between the ast and the location-resolver for resolving offsets.
|
||||
* @since 5.0
|
||||
*/
|
||||
public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.scanner2.ILocationResolver {
|
||||
|
||||
/**
|
||||
* Introduces the ast translation unit to the location resolver. Must be called before any tokens from the
|
||||
* scanner are obtained.
|
||||
*/
|
||||
void setRootNode(IASTTranslationUnit tu);
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getAllPreprocessorStatements()
|
||||
*/
|
||||
IASTPreprocessorStatement [] getAllPreprocessorStatements();
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getMacroDefinitions()
|
||||
*/
|
||||
IASTPreprocessorMacroDefinition [] getMacroDefinitions();
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getBuiltinMacroDefinitions()
|
||||
*/
|
||||
IASTPreprocessorMacroDefinition [] getBuiltinMacroDefinitions();
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getIncludeDirectives()
|
||||
*/
|
||||
IASTPreprocessorIncludeStatement [] getIncludeDirectives();
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getPreprocessorProblems()
|
||||
*/
|
||||
IASTProblem[] getScannerProblems();
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getFilePath()
|
||||
*/
|
||||
public String getTranslationUnitPath();
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getContainingFilename()
|
||||
*/
|
||||
public String getContainingFilename(int offset);
|
||||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getDependencyTree()
|
||||
*/
|
||||
public IDependencyTree getDependencyTree();
|
||||
|
||||
/**
|
||||
* Returns explicit and implicit references for a macro.
|
||||
*/
|
||||
public IASTName[] getReferences(IMacroBinding binding);
|
||||
|
||||
/**
|
||||
* Returns the definition for a macro.
|
||||
*/
|
||||
public IASTName[] getDeclarations(IMacroBinding binding);
|
||||
}
@ -10,12 +10,12 @@
 *******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;

class DigraphToken extends Token {
    public DigraphToken(int kind, int offset, int endOffset) {
        super(kind, offset, endOffset);
    }
import org.eclipse.cdt.core.dom.ast.IMacroBinding;

/**
 * Interface for the location map when using the macros from the preprocessor.
 * @since 5.0
 */
public interface IPreprocessorMacro extends ILocationCtx, IMacroBinding {

    public char[] getTokenImage() {
        return TokenUtil.getDigraphImage(getType());
    }
}
@ -10,12 +10,12 @@
 *******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;

class SimpleToken extends Token {
    public SimpleToken(int kind, int offset, int endOffset) {
        super(kind, offset, endOffset);
    }
/**
 * Information needed for computing image-locations. An image location exists for a name and describes where the name
 * came from. This can be: source code, macro-expansion, parameter to macro-expansion or synthetic.
 *
 * @since 5.0
 */
class ImageLocationInfo {

    public char[] getTokenImage() {
        return TokenUtil.getImage(getType());
    }
}
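ImageLocationInfo is still an empty placeholder at this point; the javadoc above only enumerates the possible origins of an image-location. One hypothetical way to model that classification, purely for illustration and not the representation this commit introduces:

// Hypothetical classification of image-location origins, for illustration only.
enum ImageLocationKind {
    SOURCE,             // the name is spelled directly in the source file
    MACRO_EXPANSION,    // produced while expanding a macro body
    MACRO_ARGUMENT,     // taken from an argument of a macro invocation
    SYNTHETIC           // built-in or otherwise not backed by source text
}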
@ -14,6 +14,7 @@ import org.eclipse.cdt.core.dom.ast.IASTProblem;
import org.eclipse.cdt.core.parser.IGCCToken;
import org.eclipse.cdt.core.parser.IProblem;
import org.eclipse.cdt.core.parser.IToken;
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;

/**
 * In short this class converts line endings (to '\n') and trigraphs
@ -36,6 +37,7 @@ import org.eclipse.cdt.core.parser.IToken;
 * an execution character-set is performed.
 */
final public class Lexer {
    public static final int tBEFORE_INPUT = IToken.FIRST_RESERVED_SCANNER;
    public static final int tNEWLINE = IToken.FIRST_RESERVED_SCANNER + 1;
    public static final int tEND_OF_INPUT = IToken.FIRST_RESERVED_SCANNER + 2;
    public static final int tQUOTE_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 3;
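The class comment refers to the early translation phases: line endings are normalized, trigraph sequences are replaced, and backslash-newline line splices disappear before tokens are formed. A deliberately eager sketch of that idea, with an invented class name (the real Lexer does this lazily, character by character, in nextCharPhase3()):

// Illustrative only: eager phase-1/2-style translation over a whole string.
class PhaseSketch {
    static String translate(String in) {
        String out= in.replace("\r\n", "\n");               // normalize line endings
        out= out.replace("??=", "#").replace("??/", "\\")   // replace the nine trigraphs
                .replace("??'", "^").replace("??(", "[").replace("??)", "]")
                .replace("??!", "|").replace("??<", "{").replace("??>", "}")
                .replace("??-", "~");
        return out.replace("\\\n", "");                     // remove line splices
    }
}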
@ -43,11 +45,20 @@ final public class Lexer {

    private static final int END_OF_INPUT = -1;
    private static final int LINE_SPLICE_SEQUENCE = -2;
    private static final int ORIGIN_LEXER = OffsetLimitReachedException.ORIGIN_LEXER;

    public static class LexerOptions {
    public final static class LexerOptions implements Cloneable {
        public boolean fSupportDollarInitializers= true;
        public boolean fSupportMinAndMax= true;
        public boolean fSupportContentAssist= false;

        public Object clone() {
            try {
                return super.clone();
            } catch (CloneNotSupportedException e) {
                return null;
            }
        }
    }

    // configuration
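Since LexerOptions now implements Cloneable, a caller can copy a shared option set before enabling content assist for a single lexer. A hypothetical usage, assuming the caller lives in the same package as Lexer:

// Hypothetical caller-side use of the new clone() support; not part of the commit.
class LexerOptionsSketch {
    static Lexer.LexerOptions contentAssistCopy(Lexer.LexerOptions shared) {
        Lexer.LexerOptions copy= (Lexer.LexerOptions) shared.clone();
        copy.fSupportContentAssist= true;   // tweak the copy, leave the shared defaults untouched
        return copy;
    }
}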
@ -56,7 +67,7 @@ final public class Lexer {
|
|||
|
||||
// the input to the lexer
|
||||
private final char[] fInput;
|
||||
private final int fLimit;
|
||||
private int fLimit;
|
||||
|
||||
// after phase 3 (newline, trigraph, line-splice)
|
||||
private int fOffset;
|
||||
|
@ -64,12 +75,13 @@ final public class Lexer {
|
|||
private int fCharPhase3;
|
||||
|
||||
private boolean fInsideIncludeDirective= false;
|
||||
private Token fToken;
|
||||
private Token fToken= new SimpleToken(tBEFORE_INPUT, 0, 0);
|
||||
|
||||
// for the few cases where we have to lookahead more than one character
|
||||
private int fMarkOffset;
|
||||
private int fMarkEndOffset;
|
||||
private int fMarkPrefetchedChar;
|
||||
private boolean fFirstTokenAfterNewline= true;
|
||||
|
||||
|
||||
public Lexer(char[] input, LexerOptions options, ILexerLog log) {
|
||||
|
@ -88,6 +100,17 @@ final public class Lexer {
|
|||
nextCharPhase3();
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the lexer to the first char and prepares for content-assist mode.
|
||||
*/
|
||||
public void setContentAssistMode(int offset) {
|
||||
fOptions.fSupportContentAssist= true;
|
||||
fLimit= Math.min(fLimit, fInput.length);
|
||||
// re-initialize
|
||||
fOffset= fEndOffset= 0;
|
||||
nextCharPhase3();
|
||||
}
|
||||
|
||||
/**
|
||||
* Call this before consuming the name-token in the include directive. It causes the header-file
|
||||
* tokens to be created.
|
||||
|
@ -105,18 +128,53 @@ final public class Lexer {
|
|||
|
||||
/**
|
||||
* Advances to the next token, skipping whitespace other than newline.
|
||||
* @throws CompletionTokenException when completion is requested in a literal or an header-name.
|
||||
* @throws OffsetLimitReachedException when completion is requested in a literal or a header-name.
|
||||
*/
|
||||
public Token nextToken() throws CompletionTokenException {
|
||||
public Token nextToken() throws OffsetLimitReachedException {
|
||||
fFirstTokenAfterNewline= fToken.getType() == tNEWLINE;
|
||||
return fToken= fetchToken();
|
||||
}
|
||||
|
||||
public boolean currentTokenIsFirstOnLine() {
|
||||
return fFirstTokenAfterNewline;
|
||||
}
|
||||
|
||||
/**
|
||||
* Advances to the next newline.
|
||||
* @return the end offset of the last token before the newline or the start of the newline
|
||||
* if there were no other tokens.
|
||||
* @param origin parameter for the {@link OffsetLimitReachedException} when it has to be thrown.
|
||||
* @since 5.0
|
||||
*/
|
||||
public final int consumeLine(int origin) throws OffsetLimitReachedException {
|
||||
Token t= fToken;
|
||||
Token lt= null;
|
||||
while(true) {
|
||||
switch(t.getType()) {
|
||||
case IToken.tCOMPLETION:
|
||||
fToken= t;
|
||||
throw new OffsetLimitReachedException(origin, t);
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
fToken= t;
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
throw new OffsetLimitReachedException(origin, lt);
|
||||
}
|
||||
return lt != null ? lt.getEndOffset() : t.getOffset();
|
||||
case Lexer.tNEWLINE:
|
||||
fToken= t;
|
||||
return lt != null ? lt.getEndOffset() : t.getOffset();
|
||||
}
|
||||
lt= t;
|
||||
t= fetchToken();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Advances to the next pound token that starts a preprocessor directive.
|
||||
* @return pound token of the directive or end-of-input.
|
||||
* @throws CompletionTokenException when completion is requested in a literal or an header-name.
|
||||
* @throws OffsetLimitReachedException when completion is requested in a literal or an header-name.
|
||||
*/
|
||||
public Token nextDirective() throws CompletionTokenException {
|
||||
public Token nextDirective() throws OffsetLimitReachedException {
|
||||
Token t= fToken;
|
||||
boolean haveNL= t==null || t.getType() == tNEWLINE;
|
||||
loop: while(true) {
|
||||
|
@ -147,7 +205,7 @@ final public class Lexer {
|
|||
/**
|
||||
* Computes the next token.
|
||||
*/
|
||||
private Token fetchToken() throws CompletionTokenException {
|
||||
private Token fetchToken() throws OffsetLimitReachedException {
|
||||
while(true) {
|
||||
final int start= fOffset;
|
||||
final int c= fCharPhase3;
|
||||
|
@ -454,7 +512,7 @@ final public class Lexer {
|
|||
break;
|
||||
}
|
||||
|
||||
handleProblem(IASTProblem.SCANNER_BAD_CHARACTER, start);
|
||||
handleProblem(IASTProblem.SCANNER_BAD_CHARACTER, new char[] {(char) c}, start);
|
||||
// loop is continued, character is treated as white-space.
|
||||
}
|
||||
}
|
||||
|
@ -467,15 +525,20 @@ final public class Lexer {
|
|||
return new DigraphToken(kind, offset, fOffset);
|
||||
}
|
||||
|
||||
private Token newToken(int kind, int offset, int length) {
|
||||
return new TokenWithImage(kind, this, offset, fOffset, length);
|
||||
private Token newToken(int kind, int offset, int imageLength) {
|
||||
final int endOffset= fOffset;
|
||||
int sourceLen= endOffset-offset;
|
||||
if (sourceLen != imageLength) {
|
||||
return new ImageToken(kind, offset, endOffset, getCharImage(offset, endOffset, imageLength));
|
||||
}
|
||||
return new SourceImageToken(kind, offset, endOffset, fInput);
|
||||
}
|
||||
|
||||
private void handleProblem(int problemID, int offset) {
|
||||
fLog.handleProblem(problemID, fInput, offset, fOffset);
|
||||
private void handleProblem(int problemID, char[] arg, int offset) {
|
||||
fLog.handleProblem(problemID, arg, offset, fOffset);
|
||||
}
|
||||
|
||||
private Token headerName(final int start, final boolean expectQuotes) throws CompletionTokenException {
|
||||
private Token headerName(final int start, final boolean expectQuotes) throws OffsetLimitReachedException {
|
||||
int length= 1;
|
||||
boolean done = false;
|
||||
int c= fCharPhase3;
|
||||
|
@ -483,12 +546,12 @@ final public class Lexer {
|
|||
switch (c) {
|
||||
case END_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
throw new CompletionTokenException(
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
||||
newToken((expectQuotes ? tQUOTE_HEADER_NAME : tSYSTEM_HEADER_NAME), start, length));
|
||||
}
|
||||
// no break;
|
||||
case '\n':
|
||||
handleProblem(IProblem.SCANNER_UNBOUNDED_STRING, start);
|
||||
handleProblem(IProblem.SCANNER_UNBOUNDED_STRING, getInputChars(start, fOffset), start);
|
||||
break loop;
|
||||
|
||||
case '"':
|
||||
|
@ -509,13 +572,13 @@ final public class Lexer {
|
|||
while(true) {
|
||||
switch (c) {
|
||||
case END_OF_INPUT:
|
||||
fLog.handleComment(true, fInput, start, fOffset);
|
||||
fLog.handleComment(true, start, fOffset);
|
||||
return;
|
||||
case '*':
|
||||
c= nextCharPhase3();
|
||||
if (c == '/') {
|
||||
nextCharPhase3();
|
||||
fLog.handleComment(true, fInput, start, fOffset);
|
||||
fLog.handleComment(true, start, fOffset);
|
||||
return;
|
||||
}
|
||||
break;
|
||||
|
@@ -532,14 +595,14 @@ final public class Lexer {
|
|||
switch (c) {
|
||||
case END_OF_INPUT:
|
||||
case '\n':
|
||||
fLog.handleComment(false, fInput, start, fOffset);
|
||||
fLog.handleComment(false, start, fOffset);
|
||||
return;
|
||||
}
|
||||
c= nextCharPhase3();
|
||||
}
|
||||
}
|
||||
|
||||
private Token stringLiteral(final int start, final boolean wide) throws CompletionTokenException {
|
||||
private Token stringLiteral(final int start, final boolean wide) throws OffsetLimitReachedException {
|
||||
boolean escaped = false;
|
||||
boolean done = false;
|
||||
int length= wide ? 2 : 1;
|
||||
|
@@ -549,11 +612,11 @@ final public class Lexer {
|
|||
switch(c) {
|
||||
case END_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
throw new CompletionTokenException(newToken(wide ? IToken.tLSTRING : IToken.tSTRING, start, length));
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLSTRING : IToken.tSTRING, start, length));
|
||||
}
|
||||
// no break;
|
||||
case '\n':
|
||||
handleProblem(IProblem.SCANNER_UNBOUNDED_STRING, start);
|
||||
handleProblem(IProblem.SCANNER_UNBOUNDED_STRING, getInputChars(start, fOffset), start);
|
||||
break loop;
|
||||
|
||||
case '\\':
|
||||
|
@@ -575,7 +638,7 @@ final public class Lexer {
|
|||
return newToken(wide ? IToken.tLSTRING : IToken.tSTRING, start, length);
|
||||
}
|
||||
|
||||
private Token charLiteral(final int start, boolean wide) throws CompletionTokenException {
|
||||
private Token charLiteral(final int start, boolean wide) throws OffsetLimitReachedException {
|
||||
boolean escaped = false;
|
||||
boolean done = false;
|
||||
int length= wide ? 2 : 1;
|
||||
|
@@ -585,11 +648,11 @@ final public class Lexer {
|
|||
switch(c) {
|
||||
case END_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
throw new CompletionTokenException(newToken(wide ? IToken.tLCHAR : IToken.tCHAR, start, length));
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLCHAR : IToken.tCHAR, start, length));
|
||||
}
|
||||
// no break;
|
||||
case '\n':
|
||||
handleProblem(IProblem.SCANNER_BAD_CHARACTER, start);
|
||||
handleProblem(IProblem.SCANNER_BAD_CHARACTER, getInputChars(start, fOffset), start);
|
||||
break loop;
|
||||
case '\\':
|
||||
escaped= !escaped;
|
||||
|
@@ -676,7 +739,7 @@ final public class Lexer {
|
|||
return newToken(tokenKind, start, length);
|
||||
}
|
||||
|
||||
private Token number(final int start, int length, boolean isFloat) throws CompletionTokenException {
|
||||
private Token number(final int start, int length, boolean isFloat) throws OffsetLimitReachedException {
|
||||
boolean isPartOfNumber= true;
|
||||
int c= fCharPhase3;
|
||||
while (true) {
|
||||
|
@@ -733,7 +796,7 @@ final public class Lexer {
|
|||
|
||||
case tEND_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
throw new CompletionTokenException(
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
||||
newToken((isFloat ? IToken.tFLOATINGPT : IToken.tINTEGER), start, length));
|
||||
}
|
||||
isPartOfNumber= false;
|
||||
|
@@ -916,27 +979,25 @@ final public class Lexer {
|
|||
return result;
|
||||
}
|
||||
|
||||
char[] getInput() {
|
||||
return fInput;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the image with trigraphs replaced and line-splices removed.
|
||||
*/
|
||||
char[] getTokenImage(int offset, int endOffset, int imageLength) {
|
||||
final int length= endOffset-offset;
|
||||
private char[] getCharImage(int offset, int endOffset, int imageLength) {
|
||||
final char[] result= new char[imageLength];
|
||||
if (length == imageLength) {
|
||||
System.arraycopy(fInput, offset, result, 0, length);
|
||||
}
|
||||
else {
|
||||
markPhase3();
|
||||
fEndOffset= offset;
|
||||
int idx= 0;
|
||||
while (idx<imageLength) {
|
||||
int c= fetchCharPhase3(fEndOffset);
|
||||
if (c != LINE_SPLICE_SEQUENCE) {
|
||||
result[idx++]= (char) c;
|
||||
}
|
||||
markPhase3();
|
||||
fEndOffset= offset;
|
||||
int idx= 0;
|
||||
while (idx<imageLength) {
|
||||
int c= fetchCharPhase3(fEndOffset);
|
||||
if (c != LINE_SPLICE_SEQUENCE) {
|
||||
result[idx++]= (char) c;
|
||||
}
|
||||
restorePhase3();
|
||||
}
|
||||
restorePhase3();
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
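The last hunk above rebuilds a token image character by character whenever the raw source range contains trigraphs or line splices, so the image can differ from the source text. As a rough, standalone illustration of the line-splice part of that idea (the class and method names below are invented for illustration and are not the CDT code):

// A minimal sketch, assuming plain backslash-newline splices: removes
// line splices from a character range, which is essentially what a token
// image "with line-splices removed" amounts to.
public class LineSpliceExample {
    static char[] stripLineSplices(char[] input, int offset, int endOffset) {
        StringBuilder sb = new StringBuilder(endOffset - offset);
        int i = offset;
        while (i < endOffset) {
            char c = input[i];
            if (c == '\\' && i + 1 < endOffset) {
                // skip "\<newline>" and "\<carriage return><newline>"
                if (input[i + 1] == '\n') { i += 2; continue; }
                if (input[i + 1] == '\r' && i + 2 < endOffset && input[i + 2] == '\n') { i += 3; continue; }
            }
            sb.append(c);
            i++;
        }
        char[] result = new char[sb.length()];
        sb.getChars(0, sb.length(), result, 0);
        return result;
    }

    public static void main(String[] args) {
        char[] source = "num\\\nber".toCharArray();
        System.out.println(new String(stripLineSplices(source, 0, source.length))); // prints "number"
    }
}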
@@ -0,0 +1,292 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
|
||||
/**
|
||||
* Various location contexts which are suitable for interpreting local offsets. These offsets are
|
||||
* converted into a global sequence number to make all AST nodes comparable with each other.
|
||||
* @since 5.0
|
||||
*/
|
||||
abstract class LocationCtx implements ILocationCtx {
|
||||
final LocationCtx fParent;
|
||||
final int fSequenceNumber;
|
||||
final int fParentOffset;
|
||||
final int fParentEndOffset;
|
||||
|
||||
public LocationCtx(LocationCtx parent, int parentOffset, int parentEndOffset, int sequenceNumber) {
|
||||
fParent= parent;
|
||||
fParentOffset= parentOffset;
|
||||
fParentEndOffset= parentEndOffset;
|
||||
fSequenceNumber= sequenceNumber;
|
||||
}
|
||||
|
||||
public String getFilename() {
|
||||
return fParent.getFilename();
|
||||
}
|
||||
|
||||
final public LocationCtx getParent() {
|
||||
return fParent;
|
||||
}
|
||||
/**
|
||||
* Returns the number of sequence numbers occupied by this context, including its children.
|
||||
*/
|
||||
public abstract int getSequenceLength();
|
||||
|
||||
/**
|
||||
* Converts an offset within this context to the sequence number. In case there are child-contexts
|
||||
* behind the given offset, you need to set checkChildren to <code>true</code>.
|
||||
*/
|
||||
public int getSequenceNumberForOffset(int offset, boolean checkChildren) {
|
||||
return fSequenceNumber+offset;
|
||||
}
|
||||
|
||||
/**
|
||||
* When a child-context is finished it reports its total sequence length, such that offsets in this
|
||||
* context can be converted to sequence numbers.
|
||||
*/
|
||||
public void addChildSequenceLength(int childLength) {
|
||||
assert false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the line number for an offset within this context. Not all contexts support line numbers,
|
||||
* so this may return 0.
|
||||
*/
|
||||
public int getLineNumber(int offset) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the minimal context containing the specified range, assuming that it is contained in
|
||||
* this context.
|
||||
*/
|
||||
public LocationCtx findContextForSequenceNumberRange(int sequenceNumber, int length) {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the minimal file location containing the specified sequence number range, assuming
|
||||
* that it is contained in this context.
|
||||
*/
|
||||
public IASTFileLocation getFileLocationForSequenceNumberRange(int sequenceNumber, int length) {
|
||||
return fParent.getFileLocationForOffsetRange(fParentOffset, fParentEndOffset-fParentOffset);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the file location containing the specified offset range in this context.
|
||||
*/
|
||||
public IASTFileLocation getFileLocationForOffsetRange(int parentOffset, int length) {
|
||||
return fParent.getFileLocationForOffsetRange(fParentOffset, fParentEndOffset-fParentOffset);
|
||||
}
|
||||
|
||||
/**
|
||||
* Support for the dependency tree: adds the inclusion statements found in this context.
|
||||
*/
|
||||
public void getInclusions(ArrayList target) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Support for the dependency tree: returns the inclusion statement that created this context, or <code>null</code>.
|
||||
*/
|
||||
public ASTInclusionStatement getInclusionStatement() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
class ContainerLocationCtx extends LocationCtx {
|
||||
private int fChildSequenceLength;
|
||||
private ArrayList fChildren;
|
||||
private char[] fSource;
|
||||
|
||||
public ContainerLocationCtx(LocationCtx parent, char[] source, int parentOffset, int parentEndOffset, int sequenceNumber) {
|
||||
super(parent, parentOffset, parentEndOffset, sequenceNumber);
|
||||
fSource= source;
|
||||
}
|
||||
|
||||
public final int getSequenceLength() {
|
||||
return fSource.length + fChildSequenceLength;
|
||||
}
|
||||
public final int getSequenceNumberForOffset(int offset, boolean checkChildren) {
|
||||
int result= fSequenceNumber + fChildSequenceLength + offset;
|
||||
if (checkChildren && fChildren != null) {
|
||||
for (int i= fChildren.size()-1; i >= 0; i--) {
|
||||
final LocationCtx child= (LocationCtx) fChildren.get(i);
|
||||
if (child.fParentEndOffset > offset) { // child was inserted behind the offset, adjust sequence number
|
||||
result-= child.getSequenceLength();
|
||||
}
|
||||
else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public void addChildSequenceLength(int childLength) {
|
||||
fChildSequenceLength+= childLength;
|
||||
}
|
||||
|
||||
public final LocationCtx findContextForSequenceNumberRange(int sequenceNumber, int length) {
|
||||
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber);
|
||||
if (child != null && child.fSequenceNumber+child.getSequenceLength() >= sequenceNumber+length) {
|
||||
return child;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public IASTFileLocation getFileLocationForSequenceNumberRange(int sequenceNumber, int length) {
|
||||
// try to delegate to a child.
|
||||
int useLength= length > 0 ? length-1 : 0;
|
||||
final LocationCtx child1= findChildLessOrEqualThan(sequenceNumber);
|
||||
final LocationCtx child2= findChildLessOrEqualThan(sequenceNumber+useLength);
|
||||
if (child1 == child2 && child1 != null) {
|
||||
return child1.getFileLocationForOffsetRange(sequenceNumber, length);
|
||||
}
|
||||
return super.getFileLocationForSequenceNumberRange(sequenceNumber, length);
|
||||
}
|
||||
|
||||
final LocationCtx findChildLessOrEqualThan(final int sequenceNumber) {
|
||||
if (fChildren == null) {
|
||||
return null;
|
||||
}
|
||||
int upper= fChildren.size();
|
||||
if (upper < 10) {
|
||||
for (int i=upper-1; i>=0; i--) {
|
||||
LocationCtx child= (LocationCtx) fChildren.get(i);
|
||||
if (child.fSequenceNumber <= sequenceNumber) {
|
||||
return child;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
int lower= 0;
|
||||
while (upper > lower) {
|
||||
int middle= (upper+lower)/2;
|
||||
LocationCtx child= (LocationCtx) fChildren.get(middle);
|
||||
if (child.fSequenceNumber <= sequenceNumber) {
|
||||
lower= middle+1;
|
||||
}
|
||||
else {
|
||||
upper= middle;
|
||||
}
|
||||
}
|
||||
if (lower > 0) {
|
||||
return (LocationCtx) fChildren.get(lower-1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public void getInclusions(ArrayList result) {
|
||||
for (Iterator iterator = fChildren.iterator(); iterator.hasNext();) {
|
||||
LocationCtx ctx= (LocationCtx) iterator.next();
|
||||
if (ctx.getInclusionStatement() != null) {
|
||||
result.add(new ASTInclusionNode(ctx));
|
||||
}
|
||||
else {
|
||||
ctx.getInclusions(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class FileLocationCtx extends ContainerLocationCtx {
|
||||
private final String fFilename;
|
||||
private final ASTInclusionStatement fASTInclude;
|
||||
|
||||
public FileLocationCtx(LocationCtx parent, String filename, char[] source, int parentOffset, int parentEndOffset, int sequenceNumber, ASTInclusionStatement inclusionStatement) {
|
||||
super(parent, source, parentOffset, parentEndOffset, sequenceNumber);
|
||||
fFilename= new String(filename);
|
||||
fASTInclude= inclusionStatement;
|
||||
}
|
||||
|
||||
public final void addChildSequenceLength(int childLength) {
|
||||
super.addChildSequenceLength(childLength);
|
||||
if (fASTInclude != null) {
|
||||
fASTInclude.setLength(fASTInclude.getLength()+childLength);
|
||||
}
|
||||
}
|
||||
|
||||
public final String getFilename() {
|
||||
return fFilename;
|
||||
}
|
||||
|
||||
public IASTFileLocation getFileLocationForSequenceNumberRange(int sequenceNumber, int length) {
|
||||
// try to delegate to a child.
|
||||
final int sequenceEnd= sequenceNumber+length;
|
||||
final LocationCtx child1= findChildLessOrEqualThan(sequenceNumber);
|
||||
final LocationCtx child2= sequenceEnd == sequenceNumber ? child1 : findChildLessOrEqualThan(sequenceEnd-1);
|
||||
if (child1 == child2 && child1 != null) {
|
||||
return child1.getFileLocationForOffsetRange(sequenceNumber, length);
|
||||
}
|
||||
|
||||
// handle here
|
||||
int startOffset;
|
||||
int endOffset;
|
||||
|
||||
if (child1 == null) {
|
||||
startOffset= sequenceNumber-fSequenceNumber;
|
||||
}
|
||||
else {
|
||||
int childSequenceEnd= child1.fSequenceNumber + child1.getSequenceLength();
|
||||
if (sequenceNumber < childSequenceEnd) {
|
||||
startOffset= child1.fParentOffset;
|
||||
}
|
||||
else { // start beyond child1
|
||||
startOffset= child1.fParentEndOffset + sequenceNumber-childSequenceEnd;
|
||||
}
|
||||
}
|
||||
if (child2 == null) {
|
||||
endOffset= sequenceEnd-fSequenceNumber;
|
||||
}
|
||||
else {
|
||||
int childSequenceEnd= child2.fSequenceNumber + child2.getSequenceLength();
|
||||
if (childSequenceEnd < sequenceEnd) { // beyond child2
|
||||
endOffset= child2.fParentEndOffset+sequenceEnd-childSequenceEnd;
|
||||
}
|
||||
else {
|
||||
endOffset= child2.fParentEndOffset;
|
||||
}
|
||||
}
|
||||
return new ASTFileLocation(fFilename, startOffset, endOffset-startOffset);
|
||||
}
|
||||
|
||||
public int getLineNumber(int offset) {
|
||||
// mstodo Auto-generated method stub
|
||||
return super.getLineNumber(offset);
|
||||
}
|
||||
|
||||
public ASTInclusionStatement getInclusionStatement() {
|
||||
return fASTInclude;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class MacroExpansionCtx extends LocationCtx {
|
||||
private final int fLength;
|
||||
|
||||
public MacroExpansionCtx(LocationCtx parent, int parentOffset, int parentEndOffset,
|
||||
int sequenceNumber, int length, ImageLocationInfo[] imageLocations, ASTPreprocessorName expansion) {
|
||||
super(parent, parentOffset, parentEndOffset, sequenceNumber);
|
||||
fLength= length;
|
||||
}
|
||||
|
||||
public int getSequenceLength() {
|
||||
return fLength;
|
||||
}
|
||||
|
||||
// mstodo once image locations are supported we need to handle those in here
|
||||
}
|
|
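The LocationCtx hierarchy above converts context-local offsets into global sequence numbers, subtracting the sequence ranges of child contexts that lie behind the offset in question. A simplified, self-contained sketch of that bookkeeping (the class below is illustrative only, not part of CDT):

import java.util.ArrayList;
import java.util.List;

// Simplified model: a context owns a range of sequence numbers; children
// inserted into it occupy sub-ranges, so an offset that lies before a child
// must not count that child's sequence length.
class SimpleContext {
    final int sequenceNumber;        // first sequence number of this context
    final List<int[]> children = new ArrayList<>(); // {parentEndOffset, sequenceLength}
    int childSequenceLength;         // total length contributed by children

    SimpleContext(int sequenceNumber) { this.sequenceNumber = sequenceNumber; }

    void addChild(int parentEndOffset, int sequenceLength) {
        children.add(new int[] { parentEndOffset, sequenceLength });
        childSequenceLength += sequenceLength;
    }

    // Start from the worst case (all children before the offset) and subtract
    // the children that actually end behind it.
    int sequenceNumberForOffset(int offset) {
        int result = sequenceNumber + childSequenceLength + offset;
        for (int i = children.size() - 1; i >= 0; i--) {
            int[] child = children.get(i);
            if (child[0] > offset) {
                result -= child[1];
            } else {
                break;
            }
        }
        return result;
    }

    public static void main(String[] args) {
        SimpleContext ctx = new SimpleContext(100);
        ctx.addChild(10, 50);  // e.g. an inclusion ending at offset 10, occupying 50 numbers
        System.out.println(ctx.sequenceNumberForOffset(5));  // before the child: 105
        System.out.println(ctx.sequenceNumberForOffset(20)); // behind the child: 170
    }
}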
@@ -0,0 +1,432 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - Initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIncludeStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTProblem;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
|
||||
import org.eclipse.cdt.core.dom.ast.IMacroBinding;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit.IDependencyTree;
|
||||
import org.eclipse.cdt.internal.core.dom.parser.ASTPreprocessorSelectionResult;
|
||||
|
||||
/**
|
||||
* Converts the offsets relative to various contexts to the global sequence number. Also creates and stores
|
||||
* objects that are needed to conform with the IAST... interfaces.
|
||||
* @since 5.0
|
||||
*/
|
||||
public class LocationMap implements ILocationResolver {
|
||||
private static final IASTName[] EMPTY_NAMES = {};
|
||||
|
||||
private String fTranslationUnitPath;
|
||||
private IASTTranslationUnit fTranslationUnit;
|
||||
|
||||
private ArrayList fDirectives= new ArrayList();
|
||||
private ArrayList fProblems= new ArrayList();
|
||||
private ArrayList fComments= new ArrayList();
|
||||
private ArrayList fBuiltinMacros= new ArrayList();
|
||||
private IdentityHashMap fMacroExpansions= new IdentityHashMap();
|
||||
|
||||
private LocationCtx fRootContext= null;
|
||||
private LocationCtx fCurrentContext= null;
|
||||
private int fLastChildInsertionOffset;
|
||||
|
||||
// stuff computed on demand
|
||||
private IdentityHashMap fMacroDefinitionMap= null;
|
||||
|
||||
|
||||
|
||||
public void registerPredefinedMacro(IMacroBinding macro) {
|
||||
registerPredefinedMacro(macro, getCurrentFilename(), 0, 0);
|
||||
}
|
||||
|
||||
public void registerMacroFromIndex(IMacroBinding macro, String filename, int nameOffset, int nameEndOffset, int expansionOffset) {
|
||||
registerPredefinedMacro(macro, filename, getSequenceNumberForOffset(nameOffset), getSequenceNumberForOffset(nameEndOffset));
|
||||
}
|
||||
|
||||
private void registerPredefinedMacro(IMacroBinding macro, String filename, int nameNumber, int nameEndNumber) {
|
||||
ASTMacro astmacro;
|
||||
if (macro.isFunctionStyle()) {
|
||||
astmacro= new ASTFunctionMacro(fTranslationUnit, macro, filename, nameNumber, nameEndNumber);
|
||||
}
|
||||
else {
|
||||
astmacro= new ASTMacro(fTranslationUnit, macro, filename, nameNumber, nameEndNumber);
|
||||
}
|
||||
fBuiltinMacros.add(astmacro);
|
||||
}
|
||||
|
||||
/**
|
||||
* The outermost context must be a translation unit. You must call this method exactly once and before
|
||||
* creating any other context.
|
||||
*/
|
||||
public ILocationCtx pushTranslationUnit(String filename, char[] buffer) {
|
||||
assert fCurrentContext == null;
|
||||
fTranslationUnitPath= filename;
|
||||
fRootContext= fCurrentContext= new FileLocationCtx(null, filename, buffer, 0, 0, 0, null);
|
||||
fLastChildInsertionOffset= 0;
|
||||
return fCurrentContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts an artificial context that can be used to include files without having a source that contains
|
||||
* the include directives.
|
||||
* @param buffer a buffer containing the include directives.
|
||||
* @param isMacroFile whether the context is used only for running the preprocessor.
|
||||
*/
|
||||
public ILocationCtx pushPreInclusion(char[] buffer, int offset, boolean isMacroFile) {
|
||||
assert fCurrentContext != null;
|
||||
int sequenceNumber= getSequenceNumberForOffset(offset);
|
||||
fCurrentContext= new ContainerLocationCtx(fCurrentContext, buffer, offset, offset, sequenceNumber);
|
||||
fLastChildInsertionOffset= 0;
|
||||
return fCurrentContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts a context for an included file.
|
||||
* @param buffer the buffer containing the content of the inclusion.
|
||||
* @param filename the filename of the included file
|
||||
* @param startOffset offset in the current context.
|
||||
* @param nameOffset offset in the current context.
|
||||
* @param endOffset offset in the current context
|
||||
* @param name name of the include without delimiters ("" or <>)
|
||||
* @param userInclude <code>true</code> when specified with double-quotes.
|
||||
*/
|
||||
public ILocationCtx pushInclusion(int startOffset, int nameOffset, int nameEndOffset, int endOffset,
|
||||
char[] buffer, String filename, char[] name, boolean userInclude) {
|
||||
assert fCurrentContext != null;
|
||||
int startNumber= getSequenceNumberForOffset(startOffset);
|
||||
int nameNumber= getSequenceNumberForOffset(nameOffset);
|
||||
int nameEndNumber= getSequenceNumberForOffset(nameEndOffset);
|
||||
int endNumber= getSequenceNumberForOffset(endOffset);
|
||||
final ASTInclusionStatement inclusionStatement=
|
||||
new ASTInclusionStatement(fTranslationUnit, startNumber, nameNumber, nameEndNumber, endNumber, name, filename, userInclude, true);
|
||||
fDirectives.add(inclusionStatement);
|
||||
fCurrentContext= new FileLocationCtx(fCurrentContext, filename, buffer, startOffset, endOffset, endNumber, inclusionStatement);
|
||||
fLastChildInsertionOffset= 0;
|
||||
return fCurrentContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a name representing an implicit macro expansion. The returned name can be fed into
|
||||
* {@link #pushMacroExpansion(int, int, int, int, IMacroBinding, IASTName[])}.
|
||||
* @param macro the macro that has been expanded
|
||||
* @param imageLocationInfo the image-location for the name of the macro.
|
||||
*/
|
||||
public IASTName encounterImplicitMacroExpansion(IPreprocessorMacro macro, ImageLocationInfo imageLocationInfo) {
|
||||
return new ASTMacroReferenceName(fTranslationUnit, macro, imageLocationInfo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new context for the result of a (recursive) macro-expansion.
|
||||
* @param startOffset offset within the current context where macro-expansion starts.
|
||||
* @param nameOffset offset within the current context where the name for the macro-expansion starts.
|
||||
* @param nameEndOffset offset within the current context where the name for the macro-expansion ends.
|
||||
* @param endOffset offset within the current context where the entire macro-expansion ends.
|
||||
* @param macro the outermost macro that got expanded.
|
||||
* @param implicitMacroReferences an array of implicit macro-expansions.
|
||||
* @param imageLocations an array of image-locations for the new context.
|
||||
*/
|
||||
public ILocationCtx pushMacroExpansion(int startOffset, int nameOffset, int nameEndOffset, int endOffset, int contextLength,
|
||||
IPreprocessorMacro macro, IASTName[] implicitMacroReferences, ImageLocationInfo[] imageLocations) {
|
||||
int startNumber= getSequenceNumberForOffset(startOffset);
|
||||
int nameNumber= getSequenceNumberForOffset(nameOffset);
|
||||
int nameEndNumber= getSequenceNumberForOffset(nameEndOffset);
|
||||
int endNumber= getSequenceNumberForOffset(endOffset);
|
||||
|
||||
for (int i = 0; i < implicitMacroReferences.length; i++) {
|
||||
ASTMacroReferenceName name = (ASTMacroReferenceName) implicitMacroReferences[i];
|
||||
name.setOffsetAndLength(startNumber, endNumber);
|
||||
addExpansion((IPreprocessorMacro) name.getBinding(), name);
|
||||
}
|
||||
|
||||
ASTPreprocessorName expansion= new ASTPreprocessorName(fTranslationUnit, IASTTranslationUnit.EXPANSION_NAME,
|
||||
nameNumber, nameEndNumber, macro.getNameCharArray(), macro);
|
||||
addExpansion(macro, expansion);
|
||||
|
||||
fCurrentContext= new MacroExpansionCtx(fCurrentContext, startOffset, endOffset, endNumber, contextLength, imageLocations, expansion);
|
||||
fLastChildInsertionOffset= 0;
|
||||
return fCurrentContext;
|
||||
}
|
||||
|
||||
private void addExpansion(IPreprocessorMacro macro, ASTPreprocessorName name) {
|
||||
List list= (List) fMacroExpansions.get(macro);
|
||||
if (list == null) {
|
||||
list= new ArrayList();
|
||||
fMacroExpansions.put(macro, list);
|
||||
}
|
||||
list.add(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ends the current context.
|
||||
* @param locationCtx the current context, used to check whether caller and location map are still in sync.
|
||||
*/
|
||||
public void popContext(ILocationCtx locationCtx) {
|
||||
assert fCurrentContext == locationCtx;
|
||||
final LocationCtx child= fCurrentContext;
|
||||
final LocationCtx parent= fCurrentContext.getParent();
|
||||
if (parent != null) {
|
||||
fCurrentContext= child.getParent();
|
||||
fLastChildInsertionOffset= child.fParentEndOffset;
|
||||
parent.addChildSequenceLength(child.getSequenceLength());
|
||||
fCurrentContext= parent;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reports an inclusion that is not performed.
|
||||
* @param startOffset offset in the current context.
|
||||
* @param nameOffset offset in the current context.
|
||||
* @param endOffset offset in the current context
|
||||
* @param name name of the include without delimiters ("" or <>)
|
||||
* @param filename the filename of the included file
|
||||
* @param userInclude <code>true</code> when specified with double-quotes.
|
||||
* @param active <code>true</code> when include appears in active code.
|
||||
*/
|
||||
public void encounterPoundInclude(int startOffset, int nameOffset, int nameEndOffset, int endOffset,
|
||||
char[] name, String filename, boolean userInclude, boolean active) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset); // there may be a macro expansion
|
||||
nameOffset= getSequenceNumberForOffset(nameOffset); // there may be a macro expansion
|
||||
nameEndOffset= getSequenceNumberForOffset(nameEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTInclusionStatement(fTranslationUnit, startOffset, nameOffset, nameEndOffset, endOffset, name, filename, userInclude, active));
|
||||
}
|
||||
|
||||
public void encounteredComment(int offset, int endOffset, boolean isBlockComment) {
|
||||
offset= getSequenceNumberForOffset(offset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fComments.add(new ASTComment(fTranslationUnit, offset, endOffset, isBlockComment));
|
||||
}
|
||||
|
||||
public void encounterProblem(int id, char[] arg, int offset, int endOffset) {
|
||||
offset= getSequenceNumberForOffset(offset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
ASTProblem problem = new ASTProblem(id, arg, offset, endOffset);
|
||||
fProblems.add(problem);
|
||||
}
|
||||
|
||||
public void encounterPoundElse(int startOffset, int endOffset, boolean isActive) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTElse(fTranslationUnit, startOffset, endOffset, isActive));
|
||||
}
|
||||
|
||||
public void encounterPoundElif(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset); // there may be a macro expansion
|
||||
condOffset= getSequenceNumberForOffset(condOffset); // there may be a macro expansion
|
||||
condEndOffset= getSequenceNumberForOffset(condEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTElif(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
|
||||
}
|
||||
|
||||
public void encounterPoundEndIf(int startOffset, int endOffset) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTEndif(fTranslationUnit, startOffset, endOffset));
|
||||
}
|
||||
|
||||
public void encounterPoundError(int startOffset, int condOffset, int condEndOffset, int endOffset) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
condOffset= getSequenceNumberForOffset(condOffset);
|
||||
condEndOffset= getSequenceNumberForOffset(condEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTError(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset));
|
||||
}
|
||||
|
||||
public void encounterPoundPragma(int startOffset, int condOffset, int condEndOffset, int endOffset) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
condOffset= getSequenceNumberForOffset(condOffset);
|
||||
condEndOffset= getSequenceNumberForOffset(condEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTPragma(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset));
|
||||
}
|
||||
|
||||
public void encounterPoundIfdef(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
condOffset= getSequenceNumberForOffset(condOffset);
|
||||
condEndOffset= getSequenceNumberForOffset(condEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTIfdef(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
|
||||
}
|
||||
|
||||
public void encounterPoundIfndef(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
condOffset= getSequenceNumberForOffset(condOffset);
|
||||
condEndOffset= getSequenceNumberForOffset(condEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTIfndef(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
|
||||
}
|
||||
|
||||
public void encounterPoundIf(int startOffset, int condOffset, int condEndOffset, int endOffset, boolean isActive) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset); // there may be a macro expansion
|
||||
condOffset= getSequenceNumberForOffset(condOffset); // there may be a macro expansion
|
||||
condEndOffset= getSequenceNumberForOffset(condEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTIf(fTranslationUnit, startOffset, condOffset, condEndOffset, endOffset, isActive));
|
||||
}
|
||||
|
||||
public void encounterPoundDefine(int startOffset, int nameOffset, int nameEndOffset, int expansionOffset, int endOffset, IMacroBinding macrodef) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
nameOffset= getSequenceNumberForOffset(nameOffset);
|
||||
nameEndOffset= getSequenceNumberForOffset(nameEndOffset);
|
||||
expansionOffset= getSequenceNumberForOffset(expansionOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
ASTPreprocessorNode astMacro;
|
||||
if (!macrodef.isFunctionStyle()) {
|
||||
astMacro= new ASTMacro(fTranslationUnit, macrodef, startOffset, nameOffset, nameEndOffset, expansionOffset, endOffset);
|
||||
}
|
||||
else {
|
||||
astMacro= new ASTFunctionMacro(fTranslationUnit, macrodef, startOffset, nameOffset, nameEndOffset, expansionOffset, endOffset);
|
||||
}
|
||||
fDirectives.add(astMacro);
|
||||
}
|
||||
|
||||
public void encounterPoundUndef(PreprocessorMacro definition, int startOffset, int nameOffset, int nameEndOffset, int endOffset, char[] name) {
|
||||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
nameOffset= getSequenceNumberForOffset(nameOffset);
|
||||
nameEndOffset= getSequenceNumberForOffset(nameEndOffset);
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTUndef(fTranslationUnit, name, startOffset, nameOffset, nameEndOffset, endOffset));
|
||||
}
|
||||
|
||||
public void setRootNode(IASTTranslationUnit root) {
|
||||
fTranslationUnit= root;
|
||||
}
|
||||
|
||||
public String getTranslationUnitPath() {
|
||||
return fTranslationUnitPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the line number for an offset in the current context.
|
||||
* @param offset in current context.
|
||||
*/
|
||||
public int getCurrentLineNumber(int offset) {
|
||||
return fCurrentContext.getLineNumber(offset);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the filename of the current context. If the context is a macro-expansion the filename of
|
||||
* the enclosing file is returned.
|
||||
*/
|
||||
public String getCurrentFilename() {
|
||||
return fCurrentContext.getFilename();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sequence number corresponding to the offset in the current context.
|
||||
* <p>
|
||||
* All child contexts located before the given offset must have been inserted before the conversion.
|
||||
*/
|
||||
private int getSequenceNumberForOffset(int offset) {
|
||||
return fCurrentContext.getSequenceNumberForOffset(offset, offset < fLastChildInsertionOffset);
|
||||
}
|
||||
|
||||
public String getContainingFilename(int sequenceNumber) {
|
||||
LocationCtx ctx= fRootContext.findContextForSequenceNumberRange(sequenceNumber, 1);
|
||||
return new String(ctx.getFilename());
|
||||
}
|
||||
|
||||
public IASTFileLocation getMappedFileLocation(int sequenceNumber, int length) {
|
||||
return fRootContext.getFileLocationForSequenceNumberRange(sequenceNumber, length);
|
||||
}
|
||||
|
||||
public IASTPreprocessorMacroDefinition[] getMacroDefinitions() {
|
||||
ArrayList result= new ArrayList();
|
||||
for (Iterator iterator = fDirectives.iterator(); iterator.hasNext();) {
|
||||
Object directive= iterator.next();
|
||||
if (directive instanceof IASTPreprocessorMacroDefinition) {
|
||||
result.add(directive);
|
||||
}
|
||||
}
|
||||
return (IASTPreprocessorMacroDefinition[]) result.toArray(new IASTPreprocessorMacroDefinition[result.size()]);
|
||||
}
|
||||
|
||||
public IASTPreprocessorIncludeStatement[] getIncludeDirectives() {
|
||||
ArrayList result= new ArrayList();
|
||||
for (Iterator iterator = fDirectives.iterator(); iterator.hasNext();) {
|
||||
Object directive= iterator.next();
|
||||
if (directive instanceof IASTPreprocessorIncludeStatement) {
|
||||
result.add(directive);
|
||||
}
|
||||
}
|
||||
return (IASTPreprocessorIncludeStatement[]) result.toArray(new IASTPreprocessorIncludeStatement[result.size()]);
|
||||
}
|
||||
|
||||
public IASTPreprocessorStatement[] getAllPreprocessorStatements() {
|
||||
return (IASTPreprocessorStatement[]) fDirectives.toArray(new IASTPreprocessorStatement[fDirectives.size()]);
|
||||
}
|
||||
|
||||
public IASTPreprocessorMacroDefinition[] getBuiltinMacroDefinitions() {
|
||||
return (IASTPreprocessorMacroDefinition[]) fBuiltinMacros.toArray(new IASTPreprocessorMacroDefinition[fBuiltinMacros.size()]);
|
||||
}
|
||||
|
||||
public IASTProblem[] getScannerProblems() {
|
||||
return (IASTProblem[]) fProblems.toArray(new IASTProblem[fProblems.size()]);
|
||||
}
|
||||
|
||||
|
||||
public IASTName[] getDeclarations(IMacroBinding binding) {
|
||||
if (fMacroDefinitionMap == null) {
|
||||
fMacroDefinitionMap= new IdentityHashMap();
|
||||
IASTPreprocessorMacroDefinition[] defs= getMacroDefinitions();
|
||||
for (int i = 0; i < defs.length; i++) {
|
||||
final IASTName name = defs[i].getName();
|
||||
if (name != null) {
|
||||
fMacroDefinitionMap.put(name.getBinding(), name);
|
||||
}
|
||||
}
|
||||
}
|
||||
IASTName name= (IASTName) fMacroDefinitionMap.get(binding);
|
||||
return name == null ? EMPTY_NAMES : new IASTName[] {name};
|
||||
}
|
||||
|
||||
public IASTName[] getReferences(IMacroBinding binding) {
|
||||
List list= (List) fMacroExpansions.get(binding);
|
||||
if (list == null) {
|
||||
return EMPTY_NAMES;
|
||||
}
|
||||
return (IASTName[]) list.toArray(new IASTName[list.size()]);
|
||||
}
|
||||
|
||||
public IDependencyTree getDependencyTree() {
|
||||
return new DependencyTree(fRootContext);
|
||||
}
|
||||
|
||||
// stuff to remove from ILocationResolver
|
||||
public IASTName[] getMacroExpansions() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
public void cleanup() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
public IASTFileLocation flattenLocations(IASTNodeLocation[] nodeLocations) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
public IASTNodeLocation[] getLocations(int offset, int length) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
public ASTPreprocessorSelectionResult getPreprocessorNode(String path, int offset, int length) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
public char[] getUnpreprocessedSignature(IASTNodeLocation[] locations) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
|
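LocationMap above groups every recorded expansion name under its macro binding, keyed by object identity (an IdentityHashMap), which is what later allows getReferences to answer queries per macro. The same pattern in a small generic sketch with invented names:

import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;

// Identity-keyed multimap: references are grouped by the exact binding object,
// not by equals(), which is the behavior an IdentityHashMap provides.
class ReferenceIndex<K, V> {
    private final Map<K, List<V>> map = new IdentityHashMap<>();

    void add(K binding, V reference) {
        map.computeIfAbsent(binding, k -> new ArrayList<>()).add(reference);
    }

    List<V> get(K binding) {
        List<V> list = map.get(binding);
        return list == null ? List.of() : list;
    }

    public static void main(String[] args) {
        ReferenceIndex<Object, String> index = new ReferenceIndex<>();
        Object macro = new Object();
        index.add(macro, "expansion at sequence 42");
        index.add(macro, "expansion at sequence 97");
        System.out.println(index.get(macro)); // two references for the same binding object
    }
}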
@@ -0,0 +1,249 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.eclipse.cdt.core.parser.IProblem;
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.Keywords;
|
||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
|
||||
|
||||
/**
|
||||
* Utility to parse macro definitions and create the macro objects for the preprocessor.
|
||||
* @since 5.0
|
||||
*/
|
||||
class MacroDefinitionParser {
|
||||
static class InvalidMacroDefinitionException extends Exception {
|
||||
public char[] fName;
|
||||
public InvalidMacroDefinitionException(char[] name) {
|
||||
fName= name;
|
||||
}
|
||||
}
|
||||
|
||||
private static final int ORIGIN_PREPROCESSOR_DIRECTIVE = OffsetLimitReachedException.ORIGIN_PREPROCESSOR_DIRECTIVE;
|
||||
|
||||
private int fHasVarArgs;
|
||||
private int fExpansionOffset;
|
||||
private int fExpansionEndOffset;
|
||||
private Token fNameToken;
|
||||
|
||||
/**
|
||||
* In case the name was successfully parsed, the name token is returned.
|
||||
* Otherwise the return value is undefined.
|
||||
*/
|
||||
public Token getNameToken() {
|
||||
return fNameToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* In case the expansion was successfully parsed, the start offset is returned.
|
||||
* Otherwise the return value is undefined.
|
||||
*/
|
||||
public int getExpansionOffset() {
|
||||
return fExpansionOffset;
|
||||
}
|
||||
|
||||
/**
|
||||
* In case the expansion was successfully parsed, the end offset is returned.
|
||||
* Otherwise the return value is undefined.
|
||||
*/
|
||||
public int getExpansionEndOffset() {
|
||||
return fExpansionEndOffset;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses an entire macro definition. Name must be the next token of the lexer.
|
||||
*/
|
||||
public ObjectStyleMacro parseMacroDefinition(final Lexer lexer, final ILexerLog log)
|
||||
throws OffsetLimitReachedException, InvalidMacroDefinitionException {
|
||||
final Token name = parseName(lexer);
|
||||
final char[] source= lexer.getInput();
|
||||
final char[] nameChars= name.getCharImage();
|
||||
final char[][] paramList= parseParamList(lexer, name);
|
||||
final Token replacement= parseExpansion(lexer, log, nameChars, paramList, fHasVarArgs);
|
||||
if (paramList == null) {
|
||||
return new ObjectStyleMacro(nameChars, fExpansionOffset, fExpansionEndOffset, replacement, source);
|
||||
}
|
||||
return new FunctionStyleMacro(nameChars, paramList, fHasVarArgs, fExpansionOffset, fExpansionEndOffset, replacement, source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a macro definition without the replacement. Name must be the next token of the lexer.
|
||||
*/
|
||||
public PreprocessorMacro parseMacroDefinition(final Lexer lexer, final ILexerLog log, final char[] replacement)
|
||||
throws InvalidMacroDefinitionException, OffsetLimitReachedException {
|
||||
final Token name = parseName(lexer);
|
||||
|
||||
final char[] nameChars = name.getCharImage();
|
||||
final char[][] paramList= parseParamList(lexer, name);
|
||||
final Token replacementToken = lexer.currentToken();
|
||||
if (replacementToken.getType() != Lexer.tEND_OF_INPUT) {
|
||||
throw new InvalidMacroDefinitionException(nameChars);
|
||||
}
|
||||
|
||||
if (paramList == null) {
|
||||
return new ObjectStyleMacro(nameChars, replacement);
|
||||
}
|
||||
return new FunctionStyleMacro(nameChars, paramList, fHasVarArgs, replacement);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a macro definition, essentially checking for var-args.
|
||||
*/
|
||||
public PreprocessorMacro parseMacroDefinition(final char[] name, char[][] paramList, final char[] replacement) {
|
||||
final int length = paramList == null ? 0 : paramList.length;
|
||||
fHasVarArgs= 0;
|
||||
if (paramList != null && length > 0) {
|
||||
char[] lastParam= paramList[length-1];
|
||||
final int lpl = lastParam.length;
|
||||
switch(lpl) {
|
||||
case 0: case 1: case 2:
|
||||
break;
|
||||
case 3:
|
||||
if (CharArrayUtils.equals(lastParam, Keywords.cpELLIPSIS)) {
|
||||
fHasVarArgs= FunctionStyleMacro.VAARGS;
|
||||
char[][] copy= new char[length][];
|
||||
System.arraycopy(paramList, 0, copy, 0, length-1);
|
||||
copy[length-1]= Keywords.cVA_ARGS;
|
||||
paramList= copy;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (CharArrayUtils.equals(lastParam, lpl-3, 3, Keywords.cpELLIPSIS)) {
|
||||
fHasVarArgs= FunctionStyleMacro.NAMED_VAARGS;
|
||||
char[][] copy= new char[length][];
|
||||
System.arraycopy(paramList, 0, copy, 0, length-1);
|
||||
copy[length-1]= CharArrayUtils.subarray(lastParam, 0, lpl-3);
|
||||
paramList= copy;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (paramList == null) {
|
||||
return new ObjectStyleMacro(name, replacement);
|
||||
}
|
||||
return new FunctionStyleMacro(name, paramList, fHasVarArgs, replacement);
|
||||
}
|
||||
|
||||
private Token parseName(final Lexer lexer) throws OffsetLimitReachedException, InvalidMacroDefinitionException {
|
||||
final Token name= lexer.nextToken();
|
||||
final int tt= name.getType();
|
||||
if (tt != IToken.tIDENTIFIER) {
|
||||
if (tt == IToken.tCOMPLETION) {
|
||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, name);
|
||||
}
|
||||
throw new InvalidMacroDefinitionException(name.getCharImage());
|
||||
}
|
||||
fNameToken= name;
|
||||
return name;
|
||||
}
|
||||
|
||||
private char[][] parseParamList(Lexer lex, final Token name) throws OffsetLimitReachedException, InvalidMacroDefinitionException {
|
||||
final Token lparen= lex.nextToken();
|
||||
fHasVarArgs= FunctionStyleMacro.NO_VAARGS;
|
||||
if (lparen.getType() != IToken.tLPAREN || name.getEndOffset() != lparen.getOffset()) {
|
||||
return null;
|
||||
}
|
||||
ArrayList paramList= new ArrayList();
|
||||
IToken next= null;
|
||||
do {
|
||||
final Token param= lex.nextToken();
|
||||
switch (param.getType()) {
|
||||
case IToken.tCOMPLETION:
|
||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, param);
|
||||
|
||||
case IToken.tIDENTIFIER:
|
||||
paramList.add(param.getCharImage());
|
||||
next= lex.nextToken();
|
||||
if (next.getType() == IToken.tELLIPSIS) {
|
||||
fHasVarArgs= FunctionStyleMacro.NAMED_VAARGS;
|
||||
next= lex.nextToken();
|
||||
}
|
||||
break;
|
||||
|
||||
case IToken.tELLIPSIS:
|
||||
fHasVarArgs= FunctionStyleMacro.VAARGS;
|
||||
paramList.add(Keywords.cVA_ARGS);
|
||||
next= lex.nextToken();
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new InvalidMacroDefinitionException(name.getCharImage());
|
||||
}
|
||||
}
|
||||
while (fHasVarArgs==0 && next.getType() == IToken.tCOMMA);
|
||||
if (next.getType() != IToken.tRPAREN) {
|
||||
throw new InvalidMacroDefinitionException(name.getCharImage());
|
||||
}
|
||||
next= lex.nextToken(); // consume the closing parenthesis
|
||||
|
||||
return (char[][]) paramList.toArray(new char[paramList.size()][]);
|
||||
}
|
||||
|
||||
private Token parseExpansion(final Lexer lexer, final ILexerLog log, final char[] name, final char[][] paramList, final int hasVarArgs)
|
||||
throws OffsetLimitReachedException {
|
||||
final boolean allowVarArgsArray= hasVarArgs==FunctionStyleMacro.VAARGS;
|
||||
boolean needParam= false;
|
||||
Token needAnotherToken= null;
|
||||
|
||||
Token candidate= lexer.currentToken();
|
||||
fExpansionOffset= candidate.getOffset();
|
||||
Token last= new SimpleToken(Lexer.tNEWLINE, fExpansionOffset, fExpansionOffset);
|
||||
final Token resultHolder= last;
|
||||
|
||||
loop: while(true) {
|
||||
switch(candidate.getType()) {
|
||||
case IToken.tCOMPLETION:
|
||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, candidate);
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
case Lexer.tNEWLINE:
|
||||
break loop;
|
||||
case IToken.tIDENTIFIER:
|
||||
if (!allowVarArgsArray && CharArrayUtils.equals(Keywords.cVA_ARGS, candidate.getCharImage())) {
|
||||
log.handleProblem(IProblem.PREPROCESSOR_INVALID_VA_ARGS, null, fExpansionOffset, candidate.getEndOffset());
|
||||
}
|
||||
if (needParam && CharArrayUtils.indexOf(candidate.getCharImage(), paramList) == -1) {
|
||||
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, fExpansionOffset, candidate.getEndOffset());
|
||||
}
|
||||
needParam= false;
|
||||
needAnotherToken= null;
|
||||
break;
|
||||
case IToken.tPOUND:
|
||||
needParam= paramList != null;
|
||||
break;
|
||||
case IToken.tPOUNDPOUND:
|
||||
if (needParam || resultHolder == last) {
|
||||
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, fExpansionOffset, candidate.getEndOffset());
|
||||
}
|
||||
needAnotherToken= candidate;
|
||||
needParam= false;
|
||||
break;
|
||||
default:
|
||||
if (needParam) {
|
||||
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, fExpansionOffset, candidate.getEndOffset());
|
||||
needParam= false;
|
||||
}
|
||||
needAnotherToken= null;
|
||||
break;
|
||||
}
|
||||
last.setNext(candidate); last=candidate;
|
||||
candidate= lexer.nextToken();
|
||||
}
|
||||
if (needAnotherToken != null) {
|
||||
log.handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, name, needAnotherToken.getOffset(), needAnotherToken.getEndOffset());
|
||||
}
|
||||
fExpansionEndOffset= last.getEndOffset();
|
||||
return (Token) resultHolder.getNext();
|
||||
}
|
||||
}
|
|
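MacroDefinitionParser above distinguishes three shapes of parameter list: M(a), M(...) and M(a...). A compact sketch of that classification over plain strings, using hypothetical names rather than the CDT types:

// Classifies the last macro parameter the way the three var-args constants
// suggest: no var-args, "..." (anonymous, becomes __VA_ARGS__), or "name..."
// (named var-args).
public class VarArgsKind {
    static final int NO_VAARGS = 0;    // M(a)
    static final int VAARGS = 1;       // M(...)
    static final int NAMED_VAARGS = 2; // M(a...)

    static int classify(String[] params) {
        if (params == null || params.length == 0) {
            return NO_VAARGS;
        }
        String last = params[params.length - 1];
        if (last.equals("...")) {
            return VAARGS;
        }
        if (last.endsWith("...") && last.length() > 3) {
            return NAMED_VAARGS;
        }
        return NO_VAARGS;
    }

    public static void main(String[] args) {
        System.out.println(classify(new String[] { "a", "b" }));   // 0
        System.out.println(classify(new String[] { "..." }));      // 1
        System.out.println(classify(new String[] { "args..." }));  // 2
    }
}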
@@ -0,0 +1,183 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ILinkage;
|
||||
import org.eclipse.cdt.core.dom.ast.IScope;
|
||||
import org.eclipse.cdt.core.parser.Keywords;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
|
||||
import org.eclipse.cdt.internal.core.dom.Linkage;
|
||||
|
||||
/**
|
||||
* Models macros used by the preprocessor
|
||||
* @since 5.0
|
||||
*/
|
||||
abstract class PreprocessorMacro implements IPreprocessorMacro {
|
||||
final private char[] fName;
|
||||
|
||||
public PreprocessorMacro(char[] name) {
|
||||
fName= name;
|
||||
}
|
||||
|
||||
final public ILinkage getLinkage() {
|
||||
return Linkage.NO_LINKAGE;
|
||||
}
|
||||
|
||||
final public char[] getNameCharArray() {
|
||||
return fName;
|
||||
}
|
||||
|
||||
final public String getName() {
|
||||
return new String(fName);
|
||||
}
|
||||
|
||||
public IScope getScope() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean isFunctionStyle() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public Object getAdapter(Class clazz) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
abstract class DynamicStyleMacro extends PreprocessorMacro {
|
||||
|
||||
public DynamicStyleMacro(char[] name) {
|
||||
super(name);
|
||||
}
|
||||
public char[] getExpansion() {
|
||||
return execute().getCharImage();
|
||||
}
|
||||
public abstract Token execute();
|
||||
}
|
||||
|
||||
class ObjectStyleMacro extends PreprocessorMacro {
|
||||
private static final Token NOT_INITIALIZED = new SimpleToken(0,0,0);
|
||||
|
||||
private final char[] fExpansion;
|
||||
final int fExpansionOffset;
|
||||
final int fEndOffset;
|
||||
// private Token fExpansionTokens;
|
||||
|
||||
public ObjectStyleMacro(char[] name, char[] expansion) {
|
||||
this(name, 0, expansion.length, NOT_INITIALIZED, expansion);
|
||||
}
|
||||
|
||||
public ObjectStyleMacro(char[] name, int expansionOffset, int endOffset, Token expansion, char[] source) {
|
||||
super(name);
|
||||
fExpansionOffset= expansionOffset;
|
||||
fEndOffset= endOffset;
|
||||
fExpansion= source;
|
||||
// fExpansionTokens= expansion;
|
||||
}
|
||||
|
||||
public int findParameter(char[] tokenImage) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
public char[] getExpansion() {
|
||||
final int length = fEndOffset - fExpansionOffset;
|
||||
if (length == fExpansion.length) {
|
||||
return fExpansion;
|
||||
}
|
||||
char[] result= new char[length];
|
||||
System.arraycopy(fExpansion, fExpansionOffset, result, 0, length);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class FunctionStyleMacro extends ObjectStyleMacro {
|
||||
public static final int NO_VAARGS = 0; // M(a)
|
||||
public static final int VAARGS = 1; // M(...)
|
||||
public static final int NAMED_VAARGS= 2; // M(a...)
|
||||
|
||||
final private char[][] fParamList;
|
||||
final private int fHasVarArgs;
|
||||
private char[] fSignature;
|
||||
|
||||
public FunctionStyleMacro(char[] name, char[][] paramList, int hasVarArgs, char[] expansion) {
|
||||
super(name, expansion);
|
||||
fParamList = paramList;
|
||||
fHasVarArgs= hasVarArgs;
|
||||
}
|
||||
|
||||
public FunctionStyleMacro(char[] name, char[][] paramList, int hasVarArgs, int expansionFileOffset, int endFileOffset,
|
||||
Token expansion, char[] source) {
|
||||
super(name, expansionFileOffset, endFileOffset, expansion, source);
|
||||
fParamList = paramList;
|
||||
fHasVarArgs= hasVarArgs;
|
||||
}
|
||||
|
||||
public char[][] getParamList() {
|
||||
return fParamList;
|
||||
}
|
||||
|
||||
public char[] getSignature() {
|
||||
if (fSignature != null) {
|
||||
return fSignature;
|
||||
}
|
||||
|
||||
StringBuffer result= new StringBuffer();
|
||||
result.append(getName());
|
||||
result.append('(');
|
||||
|
||||
final int lastIdx= fParamList.length-1;
|
||||
if (lastIdx >= 0) {
|
||||
for (int i = 0; i < lastIdx; i++) {
|
||||
result.append(fParamList[i]);
|
||||
result.append(',');
|
||||
}
|
||||
switch(fHasVarArgs) {
|
||||
case VAARGS:
|
||||
result.append(Keywords.cpELLIPSIS);
|
||||
break;
|
||||
case NAMED_VAARGS:
|
||||
result.append(fParamList[lastIdx]);
|
||||
result.append(Keywords.cpELLIPSIS);
|
||||
break;
|
||||
default:
|
||||
result.append(fParamList[lastIdx]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
result.append(')');
|
||||
final int len= result.length();
|
||||
final char[] sig= new char[len];
|
||||
result.getChars(0, len, sig, 0);
|
||||
fSignature= sig;
|
||||
return sig;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns one of {@link #NO_VAARGS}, {@link #VAARGS} or {@link #NAMED_VAARGS}.
|
||||
*/
|
||||
public int hasVarArgs() {
|
||||
return fHasVarArgs;
|
||||
}
|
||||
|
||||
public int findParameter(final char[] identifier) {
|
||||
for (int i=0; i < fParamList.length; i++) {
|
||||
if (CharArrayUtils.equals(fParamList[i], identifier)) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
public boolean isFunctionStyle() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
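FunctionStyleMacro.getSignature above assembles a display form such as M(a,b,...) from the parameter list and the var-args kind. The same construction as a standalone sketch (illustrative names only):

// Builds e.g. "M(a,b)", "M(a,...)" or "M(a,b...)" depending on the var-args kind.
public class MacroSignature {
    static final int NO_VAARGS = 0, VAARGS = 1, NAMED_VAARGS = 2;

    static String signature(String name, String[] params, int varArgsKind) {
        StringBuilder sb = new StringBuilder(name).append('(');
        int lastIdx = params.length - 1;
        if (lastIdx >= 0) {
            for (int i = 0; i < lastIdx; i++) {
                sb.append(params[i]).append(',');
            }
            switch (varArgsKind) {
            case VAARGS:
                sb.append("...");                          // anonymous var-args replace the last parameter
                break;
            case NAMED_VAARGS:
                sb.append(params[lastIdx]).append("...");  // named var-args keep the name
                break;
            default:
                sb.append(params[lastIdx]);
                break;
            }
        }
        return sb.append(')').toString();
    }

    public static void main(String[] args) {
        System.out.println(signature("M", new String[] { "a", "b" }, NO_VAARGS));      // M(a,b)
        System.out.println(signature("M", new String[] { "a", "..." }, VAARGS));       // M(a,...)
        System.out.println(signature("M", new String[] { "a", "args" }, NAMED_VAARGS)); // M(a,args...)
    }
}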
@@ -0,0 +1,79 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Represents part of the input to the preprocessor. This may be a file or the result of a macro expansion.
|
||||
* @since 5.0
|
||||
*/
|
||||
abstract class ScannerContext {
|
||||
public static final Integer BRANCH_IF = new Integer(0);
|
||||
public static final Integer BRANCH_ELIF = new Integer(1);
|
||||
public static final Integer BRANCH_ELSE = new Integer(2);
|
||||
public static final Integer BRANCH_END = new Integer(3);
|
||||
|
||||
private final ILocationCtx fLocationCtx;
|
||||
private final ScannerContext fParent;
|
||||
|
||||
/**
|
||||
* @param ctx
|
||||
* @param parent context to be used after this context is done.
|
||||
*/
|
||||
public ScannerContext(ILocationCtx ctx, ScannerContext parent) {
|
||||
fLocationCtx= ctx;
|
||||
fParent= parent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the location context associated with this scanner context.
|
||||
*/
|
||||
public final ILocationCtx getLocationCtx() {
|
||||
return fLocationCtx;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the parent context which will be used after this context is finished.
|
||||
* May return <code>null</code>.
|
||||
*/
|
||||
public final ScannerContext getParent() {
|
||||
return fParent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests whether identifiers in this context are subject to macro-expansion.
|
||||
*/
|
||||
public boolean expandsMacros() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the lexer for a preprocessing directive or <code>null</code> if the current
|
||||
* token is not the start of a preprocessing directive.
|
||||
* <p>
|
||||
* The current token starts a directive, whenever the context supports directives,
|
||||
* and the current token is a pound that occurs as the first token on the line.
|
||||
*/
|
||||
public abstract Lexer getLexerForPPDirective();
|
||||
|
||||
/**
|
||||
* Needs to be called whenever we change over to another branch of conditional
|
||||
* compilation. Returns whether the change is legal at this point or not.
|
||||
*/
|
||||
public abstract boolean changeBranch(Integer state);
|
||||
|
||||
|
||||
public abstract Token currentPPToken();
|
||||
public abstract Token nextPPToken() throws OffsetLimitReachedException;
|
||||
}
|
|
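Each ScannerContext above keeps a link to its parent, so the contexts effectively form a stack: when a file or macro expansion is exhausted, processing falls back to the enclosing context. A toy sketch of that parent chain (not the CDT classes):

// Toy model of contexts chained through parent links: when the current one is
// done, processing simply continues with its parent.
class ToyContext {
    final String name;
    final ToyContext parent;

    ToyContext(String name, ToyContext parent) {
        this.name = name;
        this.parent = parent;
    }

    public static void main(String[] args) {
        ToyContext tu = new ToyContext("translation-unit", null);
        ToyContext header = new ToyContext("header.h", tu);
        ToyContext macro = new ToyContext("macro-expansion", header);

        // unwind from the innermost context back to the translation unit
        for (ToyContext c = macro; c != null; c = c.parent) {
            System.out.println("processing " + c.name);
        }
    }
}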
@@ -0,0 +1,75 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
|
||||
/**
|
||||
* Wraps a {@link Lexer} and provides additional information for the preprocessor.
|
||||
* <p>
|
||||
* Note that for parsing the preprocessor directives the lexer is used directly, so this class
|
||||
* is not allowed to store any state about the lexing process.
|
||||
*
|
||||
* @since 5.0
|
||||
*/
|
||||
public class ScannerContextFile extends ScannerContext {
|
||||
|
||||
private final Lexer fLexer;
|
||||
private final ArrayList fBranches= new ArrayList();
|
||||
|
||||
public ScannerContextFile(ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
|
||||
super(ctx, parent);
|
||||
fLexer= lexer;
|
||||
}
|
||||
|
||||
public Token currentPPToken() {
|
||||
return fLexer.currentToken();
|
||||
}
|
||||
|
||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
||||
return fLexer.nextToken();
|
||||
}
|
||||
|
||||
public Lexer getLexerForPPDirective() {
|
||||
if (fLexer.currentTokenIsFirstOnLine() && fLexer.currentToken().getType() == IToken.tPOUND) {
|
||||
return fLexer;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean changeBranch(Integer branchKind) {
|
||||
// an if starts a new conditional construct
|
||||
if (branchKind == BRANCH_IF) {
|
||||
fBranches.add(branchKind);
|
||||
return true;
|
||||
}
|
||||
// if we are not inside of an conditional there shouldn't be an #else, #elsif or #end
|
||||
final int pos= fBranches.size()-1;
|
||||
if (pos < 0) {
|
||||
return false;
|
||||
}
|
||||
// an #end just pops one construct.
|
||||
if (branchKind == BRANCH_END) {
|
||||
fBranches.remove(pos);
|
||||
return true;
|
||||
}
|
||||
// #elsif or #else cannot appear after another #else
|
||||
if (fBranches.get(pos) == BRANCH_ELSE) {
|
||||
return false;
|
||||
}
|
||||
// overwrite #if, #elsif with #elsif or #else
|
||||
fBranches.set(pos, branchKind);
|
||||
return true;
|
||||
}
|
||||
}
|
|
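Illustration only (not code from this change): a standalone sketch of the branch bookkeeping in changeBranch() above, with invented constant values and class name. The comments mark which directives each call is meant to model.

import java.util.ArrayList;

// Standalone model of the conditional-branch bookkeeping: #if pushes, #endif pops,
// #elif/#else overwrite the top entry, and nothing may follow an #else.
class BranchTrackerDemo {
	static final Integer IF = 1, ELSE = 2, END = 3;   // invented values for the demo
	private final ArrayList<Integer> branches = new ArrayList<>();

	boolean changeBranch(Integer kind) {
		if (kind == IF) { branches.add(kind); return true; }
		int pos = branches.size() - 1;
		if (pos < 0) return false;                     // #elif/#else/#endif without #if
		if (kind == END) { branches.remove(pos); return true; }
		if (branches.get(pos) == ELSE) return false;   // second #else (or #elif after #else)
		branches.set(pos, kind);                       // #if/#elif replaced by #elif/#else
		return true;
	}

	public static void main(String[] args) {
		BranchTrackerDemo d = new BranchTrackerDemo();
		System.out.println(d.changeBranch(IF));    // true:  #if
		System.out.println(d.changeBranch(ELSE));  // true:  #else
		System.out.println(d.changeBranch(ELSE));  // false: second #else is rejected
		System.out.println(d.changeBranch(END));   // true:  #endif
		System.out.println(d.changeBranch(END));   // false: no open conditional
	}
}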
@@ -0,0 +1,46 @@
/*******************************************************************************
 * Copyright (c) 2007 Wind River Systems, Inc. and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *    Markus Schorn - initial API and implementation
 *******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;

import org.eclipse.cdt.core.parser.OffsetLimitReachedException;

/**
 * Context used to run the preprocessor while swallowing all tokens.
 * Needed to process macro-files as specified by the -imacro compiler option of gcc.
 * @since 5.0
 */
public class ScannerContextMacroFile extends ScannerContextFile {
	private final CPreprocessor fCpp;
	private boolean fSkippingTokens= false;

	public ScannerContextMacroFile(CPreprocessor cpp, ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
		super(ctx, parent, lexer);
		fCpp= cpp;
	}

	public Token nextPPToken() throws OffsetLimitReachedException {
		if (fSkippingTokens) {
			final Token t= super.nextPPToken();
			if (t.getType() == Lexer.tEND_OF_INPUT) {
				fSkippingTokens= false;
			}
			return t;
		}

		// use preprocessor to read tokens off this context, until this context is done.
		fSkippingTokens= true;
		Token t;
		do {
			t= fCpp.fetchTokenFromPreprocessor();
		} while (fSkippingTokens);
		return t;
	}
}
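Illustration only (not code from this change): a standalone sketch of the drain-then-report-end behaviour, where the first request processes every token of the macro file and only the end-of-input marker reaches the caller. The marker string, class name, and the recursive stand-in for the preprocessor call are invented.

import java.util.Arrays;
import java.util.Iterator;

// Standalone model of the -imacro behaviour: the first request drains every token of the
// macro file (so that #define directives take effect as side effects) and only the final
// end-of-input marker is handed back to the caller.
class MacroFileDrainDemo {
	private static final String END_OF_INPUT = "<eoi>";      // invented marker
	private final Iterator<String> tokens =
			Arrays.asList("#define", "DEBUG", "1", END_OF_INPUT).iterator();
	private boolean skipping;

	String nextToken() {
		if (skipping) {                                      // called re-entrantly while draining
			String t = tokens.next();
			if (t.equals(END_OF_INPUT)) skipping = false;
			return t;
		}
		skipping = true;
		String t;
		do {
			t = nextToken();                                 // stands in for the preprocessor pull
		} while (skipping);
		return t;                                            // caller only ever sees <eoi>
	}

	public static void main(String[] args) {
		System.out.println(new MacroFileDrainDemo().nextToken());   // prints <eoi>
	}
}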
@@ -0,0 +1,119 @@
/*******************************************************************************
 * Copyright (c) 2007 Wind River Systems, Inc. and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *    Markus Schorn - initial API and implementation
 *******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;

import org.eclipse.cdt.core.parser.IToken;
import org.eclipse.cdt.core.parser.Keywords;
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
import org.eclipse.cdt.core.parser.util.CharArrayUtils;

/**
 * Wraps a ScannerContext and modifies its behavior by limiting the tokens
 * to the ones on the current line. Instead of the newline token an end-of-input
 * token is returned. The newline token of the underlying context is not consumed.
 * @since 5.0
 */
public final class ScannerContextPPDirective extends ScannerContext {

	private static final int STATE_PREVENT_EXPANSION = 1;
	private static final int STATE_DEFINED_LPAREN = 2;
	private static final int STATE_DEFINED = 3;

	private final Lexer fLexer;
	private Token fToken;
	private boolean fConvertDefinedToken;
	private int fPreventMacroExpansion= 0;
	private int fLastEndOffset;

	public ScannerContextPPDirective(Lexer lexer, boolean convertDefinedToken) {
		super(null, null);
		fLexer= lexer;
		final Token currentToken = lexer.currentToken();
		fLastEndOffset= currentToken.getOffset();
		fToken= convertToken(currentToken);
		fConvertDefinedToken= convertDefinedToken;
	}

	public Token currentPPToken() {
		return fToken;
	}

	public Token nextPPToken() throws OffsetLimitReachedException {
		if (fToken.getType() == Lexer.tEND_OF_INPUT) {
			return fToken;
		}
		final Token t= convertToken(fLexer.nextToken());
		fToken= t;
		return t;
	}

	public Lexer getLexerForPPDirective() {
		return null;
	}

	public boolean changeBranch(Integer state) {
		return false;
	}

	private Token convertToken(Token t) {
		switch (t.getType()) {
		case Lexer.tNEWLINE:
			t= new SimpleToken(Lexer.tEND_OF_INPUT, fToken.getEndOffset(), fToken.getEndOffset());
			break;
		case IToken.tIDENTIFIER:
			if (fConvertDefinedToken && CharArrayUtils.equals(Keywords.cDEFINED, t.getCharImage())) {
				t.setType(CPreprocessor.tDEFINED);
				fPreventMacroExpansion= STATE_DEFINED;
			}
			else {
				switch (fPreventMacroExpansion) {
				case STATE_DEFINED:
				case STATE_DEFINED_LPAREN:
					fPreventMacroExpansion= STATE_PREVENT_EXPANSION;
					break;
				default:
					fPreventMacroExpansion= 0;
				}
			}
			fLastEndOffset= t.getEndOffset();
			break;
		case IToken.tLPAREN:
			if (fPreventMacroExpansion == STATE_DEFINED) {
				fPreventMacroExpansion= STATE_DEFINED_LPAREN; // suppress macro-expansion for 'defined (id)'
			}
			else {
				fPreventMacroExpansion= 0;
			}
			fLastEndOffset= t.getEndOffset();
			break;
		default:
			fPreventMacroExpansion= 0;
			fLastEndOffset= t.getEndOffset();
			break;
		}
		return t;
	}

	public boolean expandsMacros() {
		return fPreventMacroExpansion == 0;
	}

	public void setInsideIncludeDirective() {
		fLexer.setInsideIncludeDirective();
	}

	public int getLastEndOffset() {
		return fLastEndOffset;
	}
}
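Illustration only (not code from this change): a standalone sketch of the state handling around the defined operator, where the operand of 'defined X' or 'defined(X)' is exempt from macro expansion. State values, method names, and token strings are invented.

// Standalone model of the 'defined' state machine: after 'defined', optionally followed
// by '(', the next identifier must not be macro-expanded; any other token resets the state.
class DefinedStateDemo {
	private static final int NONE = 0, PREVENT = 1, DEFINED_LPAREN = 2, DEFINED = 3;
	private int state = NONE;

	/** Returns true if the given identifier may be macro-expanded. */
	boolean feedIdentifier(String ident) {
		if (ident.equals("defined")) { state = DEFINED; return false; }
		state = (state == DEFINED || state == DEFINED_LPAREN) ? PREVENT : NONE;
		return state == NONE;
	}

	void feedLParen() { state = (state == DEFINED) ? DEFINED_LPAREN : NONE; }
	void feedOther()  { state = NONE; }

	public static void main(String[] args) {
		DefinedStateDemo d = new DefinedStateDemo();
		System.out.println(d.feedIdentifier("defined")); // false: the operator itself
		d.feedLParen();                                  // 'defined ('
		System.out.println(d.feedIdentifier("FOO"));     // false: operand of defined
		d.feedOther();                                   // ')'
		System.out.println(d.feedIdentifier("FOO"));     // true: ordinary identifier again
	}
}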
@@ -12,14 +12,17 @@ package org.eclipse.cdt.internal.core.parser.scanner;

import org.eclipse.cdt.core.parser.IToken;

/**
 * Represents tokens found by the lexer. The preprocessor reuses the tokens and passes
 * them on to the parsers.
 * @since 5.0
 */
public abstract class Token implements IToken {
	private int fKind;

	int fOffset;
	int fEndOffset;
	private int fOffset;
	private int fEndOffset;

	private IToken fNextGrammarToken;
	private IToken fNextToken;

	Token(int kind, int offset, int endOffset) {
		fKind= kind;
@@ -43,46 +46,31 @@ public abstract class Token implements IToken {
		return fEndOffset-fOffset;
	}

	public IToken getNext() {
		return fNextGrammarToken;
		return fNextToken;
	}

	public abstract char[] getTokenImage();

	// for the preprocessor to classify preprocessor tokens
	public void setType(int kind) {
		// mstodo make non-public
		fKind= kind;
	}

	// for the preprocessor to chain the tokens
	public void setNext(IToken t) {
		// mstodo make non-public
		fNextGrammarToken= t;
		fNextToken= t;
	}

	public abstract char[] getCharImage();

	public boolean isOperator() {
		// mstodo
		return TokenUtil.isOperator(fKind);
	}

	public char[] getCharImage() {
		// mstodo
		throw new UnsupportedOperationException();
	}

	public String getImage() {
		// mstodo
		throw new UnsupportedOperationException();
		return new String(getCharImage());
	}

	public char[] getFilename() {
		// mstodo
		throw new UnsupportedOperationException();
@@ -107,5 +95,59 @@ public abstract class Token implements IToken {
		// mstodo
		throw new UnsupportedOperationException();
	}

}

class SimpleToken extends Token {
	public SimpleToken(int kind, int offset, int endOffset) {
		super(kind, offset, endOffset);
	}

	public char[] getCharImage() {
		return TokenUtil.getImage(getType());
	}
}

class DigraphToken extends Token {
	public DigraphToken(int kind, int offset, int endOffset) {
		super(kind, offset, endOffset);
	}

	public char[] getCharImage() {
		return TokenUtil.getDigraphImage(getType());
	}
}

class ImageToken extends Token {
	private char[] fImage;

	public ImageToken(int kind, int offset, int endOffset, char[] image) {
		super(kind, offset, endOffset);
		fImage= image;
	}

	public char[] getCharImage() {
		return fImage;
	}
}

class SourceImageToken extends Token {

	private char[] fSource;
	private char[] fImage;

	public SourceImageToken(int kind, int offset, int endOffset, char[] source) {
		super(kind, offset, endOffset);
		fSource= source;
	}

	public char[] getCharImage() {
		if (fImage == null) {
			final int length= getLength();
			fImage= new char[length];
			System.arraycopy(fSource, getOffset(), fImage, 0, length);
		}
		return fImage;
	}
}
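Illustration only (not code from this change): a standalone sketch of the lazy image extraction that SourceImageToken performs, copying the token text out of the source buffer only on first request. The class name and the sample source are invented.

// Standalone model of the SourceImageToken idea: the token stores only offsets into the
// original source and copies its character image out lazily, caching the result.
class LazyImageTokenDemo {
	private final char[] source;
	private final int offset, endOffset;
	private char[] image;                         // computed on demand, then cached

	LazyImageTokenDemo(char[] source, int offset, int endOffset) {
		this.source = source;
		this.offset = offset;
		this.endOffset = endOffset;
	}

	char[] getCharImage() {
		if (image == null) {
			image = new char[endOffset - offset];
			System.arraycopy(source, offset, image, 0, image.length);
		}
		return image;
	}

	public static void main(String[] args) {
		char[] src = "int counter = 0;".toCharArray();
		LazyImageTokenDemo ident = new LazyImageTokenDemo(src, 4, 11);
		System.out.println(new String(ident.getCharImage()));   // prints "counter"
	}
}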
@@ -124,7 +124,6 @@ public class TokenUtil {
		case IGCCToken.tMAX: return Keywords.cpMAX;

		default:
			assert false: type;
			return IMAGE_EMPTY;
		}
	}
@@ -1,38 +0,0 @@
/*******************************************************************************
 * Copyright (c) 2007 Wind River Systems, Inc. and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *    Markus Schorn - initial API and implementation
 *******************************************************************************/
package org.eclipse.cdt.internal.core.parser.scanner;

class TokenWithImage extends Token {

	final private Lexer fLexer;
	final private int fImageLength;
	private char[] fImage;

	public TokenWithImage(int kind, Lexer source, int offset, int endOffset, int imageLength) {
		super(kind, offset, endOffset);
		fLexer= source;
		fImageLength= imageLength;
	}

	public TokenWithImage(int kind, int offset, int endOffset, char[] image) {
		super(kind, offset, endOffset);
		fLexer= null;
		fImageLength= 0;
		fImage= image;
	}

	public char[] getTokenImage() {
		if (fImage == null) {
			fImage= fLexer.getTokenImage(fOffset, fEndOffset, fImageLength);
		}
		return fImage;
	}
}