mirror of
https://github.com/eclipse-cdt/cdt
CPreprocessor can replace DOMScanner.
This commit is contained in:
parent 67bcec2f0f
commit fffaae4443
48 changed files with 977 additions and 737 deletions
@@ -1,5 +1,5 @@
/*******************************************************************************
* Copyright (c) 2005, 2006 IBM Corporation and others.
* Copyright (c) 2005, 2007 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -7,6 +7,7 @@
*
* Contributors:
* IBM Corporation - initial API and implementation
* Markus Schorn (Wind River Systems)
*******************************************************************************/
/*
* Created on Jun 4, 2003
@@ -14,12 +15,15 @@
*/
package org.eclipse.cdt.core.model.tests;

import java.util.LinkedHashMap;

import junit.framework.Test;
import junit.framework.TestSuite;

import org.eclipse.cdt.core.model.CModelException;
import org.eclipse.cdt.core.model.IInclude;
import org.eclipse.cdt.core.model.ITranslationUnit;
import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;

/**
* @author bnicolle
@@ -27,11 +31,14 @@ import org.eclipse.cdt.core.model.ITranslationUnit;
*/
public class IIncludeTests extends IntegratedCModelTest {

private boolean fUseCPreprocessor;

/**
* @param string
*/
public IIncludeTests(String string) {
super( string );
fUseCPreprocessor= CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"));
}

/**
@ -69,57 +76,39 @@ public class IIncludeTests extends IntegratedCModelTest {
|
|||
assertNotNull("CModelException thrown",c);
|
||||
}
|
||||
|
||||
String getIncludeNameList[] = new String[] {
|
||||
new String("stdio.h"),
|
||||
new String("whatever.h"),
|
||||
new String("src/slash.h"),
|
||||
new String("src\\backslash.h"), // that's a single backslash, escaped
|
||||
new String("Program Files/space.h"),
|
||||
new String("../up1dir.h"),
|
||||
new String("./samedir.h"),
|
||||
new String("different_extension1.hpp"),
|
||||
new String("different_extension2.hh"),
|
||||
new String("different_extension3.x"),
|
||||
new String("no_extension"),
|
||||
new String("whitespace_after_hash"),
|
||||
new String("whitespace_before_hash"),
|
||||
new String("resync_after_bad_parse_1"),
|
||||
new String("resync_after_bad_parse_2"),
|
||||
new String("one"), // C-spec does not allow this, but that's OK for our present purposes
|
||||
new String("resync_after_bad_parse_3"),
|
||||
new String("invalid.h"), // C-spec does not allow this, but that's OK for our present purposes
|
||||
new String("myInclude1.h"),
|
||||
new String("vers2.h")
|
||||
};
|
||||
LinkedHashMap expectIncludes= new LinkedHashMap();
|
||||
expectIncludes.put("stdio.h", Boolean.TRUE);
|
||||
expectIncludes.put("whatever.h", Boolean.FALSE);
|
||||
expectIncludes.put("src/slash.h", Boolean.TRUE);
|
||||
expectIncludes.put("src\\backslash.h", Boolean.TRUE); // that's a single backslash, escaped
|
||||
expectIncludes.put("Program Files/space.h", Boolean.FALSE);
|
||||
expectIncludes.put("../up1dir.h", Boolean.FALSE);
|
||||
expectIncludes.put("./samedir.h", Boolean.FALSE);
|
||||
expectIncludes.put("different_extension1.hpp", Boolean.FALSE);
|
||||
expectIncludes.put("different_extension2.hh", Boolean.FALSE);
|
||||
expectIncludes.put("different_extension3.x", Boolean.FALSE);
|
||||
expectIncludes.put("no_extension", Boolean.TRUE);
|
||||
expectIncludes.put("whitespace_after_hash", Boolean.FALSE);
|
||||
expectIncludes.put("whitespace_before_hash", Boolean.FALSE);
|
||||
expectIncludes.put("resync_after_bad_parse_1", Boolean.FALSE);
|
||||
expectIncludes.put("resync_after_bad_parse_2", Boolean.FALSE);
|
||||
expectIncludes.put("one", Boolean.FALSE); // C-spec does not allow this, gcc warns and includes, so we should include it, also.
|
||||
expectIncludes.put("resync_after_bad_parse_3", Boolean.FALSE);
|
||||
if (!fUseCPreprocessor) {
|
||||
expectIncludes.put("invalid.h", Boolean.FALSE); // C-spec does not allow this, but that's OK for our present purposes
|
||||
}
|
||||
expectIncludes.put("myInclude1.h", Boolean.FALSE);
|
||||
expectIncludes.put("vers2.h", Boolean.FALSE);
|
||||
|
||||
String[] getIncludeNameList= (String[]) expectIncludes.keySet().toArray(new String[expectIncludes.size()]);
|
||||
assertEquals( getIncludeNameList.length, theIncludes.length );
|
||||
for( int i=0; i<getIncludeNameList.length; i++ )
|
||||
{
|
||||
IInclude inc1 = theIncludes[i];
|
||||
assertEquals( getIncludeNameList[i], inc1.getIncludeName() );
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void testIsStandard() throws CModelException
|
||||
{
|
||||
ITranslationUnit tu = getTU();
|
||||
IInclude[] theIncludes = null;
|
||||
try {
|
||||
theIncludes = tu.getIncludes();
|
||||
}
|
||||
catch( CModelException c )
|
||||
{
|
||||
assertNotNull("CModelException thrown",c);
|
||||
}
|
||||
boolean isStandardList[] = new boolean[] {
|
||||
true, false
|
||||
};
|
||||
for( int i=0; i<isStandardList.length; i++ )
|
||||
{
|
||||
IInclude inc1 = theIncludes[i];
|
||||
assertEquals( isStandardList[i], inc1.isStandard() );
|
||||
String expectName= getIncludeNameList[i];
|
||||
assertEquals( expectName, inc1.getIncludeName() );
|
||||
assertEquals( ((Boolean) expectIncludes.get(expectName)).booleanValue(), inc1.isStandard());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,8 @@
|
|||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* IBM Rational Software - Initial API and implementation
|
||||
* IBM Rational Software - Initial API and implementation
|
||||
* Markus Schorn (Wind River Systems)
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.core.parser.tests.ast2;
|
||||
|
||||
|
@ -14,6 +15,7 @@ import java.io.StringWriter;
|
|||
import java.io.Writer;
|
||||
|
||||
import org.eclipse.cdt.core.dom.IName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IBinding;
|
||||
|
@ -1531,8 +1533,7 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
|
|||
decls = getDeclarationOffTU((IASTName)node);
|
||||
assertEquals(decls.length, 1);
|
||||
assertEquals( decls[0].toString(), "test" ); //$NON-NLS-1$
|
||||
assertEquals( ((ASTNode)decls[0]).getOffset(), 132);
|
||||
assertEquals( ((ASTNode)decls[0]).getLength(), 4);
|
||||
assertLocation(code, "test:", 4, decls[0]);
|
||||
}
|
||||
|
||||
public void testBugMethodDef() throws Exception {
|
||||
|
@ -1694,7 +1695,7 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
|
|||
buffer.append("char c; // selection on this fails because offset for \n"); //$NON-NLS-1$
|
||||
buffer.append("_END_STD_C\n"); //$NON-NLS-1$
|
||||
buffer.append("char foo() {\n"); //$NON-NLS-1$
|
||||
buffer.append("return c; \n"); //$NON-NLS-1$
|
||||
buffer.append("return c; // ref \n"); //$NON-NLS-1$
|
||||
buffer.append("}\n"); //$NON-NLS-1$
|
||||
|
||||
String code = buffer.toString();
|
||||
|
@ -1707,8 +1708,7 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
|
|||
IName[] decls = getDeclarationOffTU((IASTName)node);
|
||||
assertEquals(decls.length, 1);
|
||||
assertEquals( decls[0].toString(), "c" ); //$NON-NLS-1$
|
||||
assertEquals( ((ASTNode)decls[0]).getOffset(), 86);
|
||||
assertEquals( ((ASTNode)decls[0]).getLength(), 1);
|
||||
assertLocation(code, "c;", 1, decls[0]);
|
||||
|
||||
index = code.indexOf("char c"); //$NON-NLS-1$
|
||||
node = parse( code, index + 5, index + 6, true );
|
||||
|
@ -1718,11 +1718,17 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
|
|||
IName[] refs = getReferencesOffTU((IASTName)node);
|
||||
assertEquals(refs.length, 1);
|
||||
assertEquals( refs[0].toString(), "c" ); //$NON-NLS-1$
|
||||
assertEquals( ((ASTNode)refs[0]).getOffset(), 168);
|
||||
assertEquals( ((ASTNode)decls[0]).getLength(), 1);
|
||||
assertLocation(code, "c; // ref", 1, refs[0]);
|
||||
}
|
||||
|
||||
public void testBug92632() throws Exception
|
||||
private void assertLocation(String code, String occur, int length, IName name) {
|
||||
int offset= code.indexOf(occur);
|
||||
final IASTFileLocation loc= name.getFileLocation();
|
||||
assertEquals(offset, loc.getNodeOffset());
|
||||
assertEquals(length, loc.getNodeLength());
|
||||
}
|
||||
|
||||
public void testBug92632() throws Exception
|
||||
{
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("namespace N{ \n"); //$NON-NLS-1$
|
||||
|
|
|
@ -105,7 +105,7 @@ public class LexerTests extends BaseTestCase {
|
|||
|
||||
private void eof() throws Exception {
|
||||
IToken t= fLexer.nextToken();
|
||||
assertEquals("superfluous token " + t, Lexer.tEND_OF_INPUT, t.getType());
|
||||
assertEquals("superfluous token " + t, IToken.tEND_OF_INPUT, t.getType());
|
||||
assertEquals(0, fLog.getProblemCount());
|
||||
assertEquals(0, fLog.getCommentCount());
|
||||
}
|
||||
|
|
|
@ -139,11 +139,17 @@ public class LocationMapTests extends BaseTestCase {
|
|||
}
|
||||
|
||||
private void checkLocation(IASTFileLocation loc, String filename, int offset, int length, int line, int endline) {
|
||||
assertEquals(filename, loc.getFileName());
|
||||
assertEquals(offset, loc.getNodeOffset());
|
||||
assertEquals(length, loc.getNodeLength());
|
||||
assertEquals(line, loc.getStartingLineNumber());
|
||||
assertEquals(endline, loc.getEndingLineNumber());
|
||||
if (loc == null) {
|
||||
assertEquals(0, offset);
|
||||
assertEquals(0, length);
|
||||
}
|
||||
else {
|
||||
assertEquals(filename, loc.getFileName());
|
||||
assertEquals(offset, loc.getNodeOffset());
|
||||
assertEquals(length, loc.getNodeLength());
|
||||
assertEquals(line, loc.getStartingLineNumber());
|
||||
assertEquals(endline, loc.getEndingLineNumber());
|
||||
}
|
||||
}
|
||||
|
||||
private void checkComment(IASTComment comment, String content, boolean blockComment,
|
||||
|
@ -319,7 +325,7 @@ public class LocationMapTests extends BaseTestCase {
|
|||
IASTPreprocessorIncludeStatement[] includes= fLocationMap.getIncludeDirectives();
|
||||
assertEquals(2, includes.length);
|
||||
checkInclude(includes[0], "", "", "n1", "", true, false, FN, 0, 0, 1, 0, 0);
|
||||
checkInclude(includes[1], "012", "12", "n2", "f2", false, true, FN, 0, 3, 1, 1, 2);
|
||||
checkInclude(includes[1], new String(DIGITS), "12", "n2", "f2", false, true, FN, 0, 16, 1, 1, 2);
|
||||
}
|
||||
|
||||
public void testIf() {
|
||||
|
@ -434,7 +440,6 @@ public class LocationMapTests extends BaseTestCase {
|
|||
}
|
||||
|
||||
public void testMacroExpansion() {
|
||||
ImageLocationInfo ili= new ImageLocationInfo();
|
||||
IMacroBinding macro1= new TestMacro("n1", "exp1", null);
|
||||
IMacroBinding macro2= new TestMacro("n2", "exp2", null);
|
||||
IMacroBinding macro3= new TestMacro("n3", "exp3", null);
|
||||
|
@ -444,8 +449,8 @@ public class LocationMapTests extends BaseTestCase {
|
|||
fLocationMap.registerPredefinedMacro(macro1);
|
||||
fLocationMap.registerMacroFromIndex(macro2, "ifile", 2, 12, 32);
|
||||
fLocationMap.encounterPoundDefine(3, 13, 33, 63, 103, macro3);
|
||||
IASTName name1= fLocationMap.encounterImplicitMacroExpansion(macro1, ili);
|
||||
IASTName name2= fLocationMap.encounterImplicitMacroExpansion(macro2, ili);
|
||||
IASTName name1= fLocationMap.encounterImplicitMacroExpansion(macro1, null);
|
||||
IASTName name2= fLocationMap.encounterImplicitMacroExpansion(macro2, null);
|
||||
fLocationMap.pushMacroExpansion(110, 115, 125, 30, macro3, new IASTName[]{name1, name2}, new ImageLocationInfo[0]);
|
||||
fLocationMap.encounteredComment(12, 23, false);
|
||||
checkComment(fLocationMap.getComments()[0], new String(LONGDIGITS, 110, 15), false, FN, 110, 15, 2, 2);
|
||||
|
@ -551,7 +556,7 @@ public class LocationMapTests extends BaseTestCase {
|
|||
|
||||
inclusions= inclusions[0].getNestedInclusions();
|
||||
assertEquals(1, inclusions.length);
|
||||
checkInclude(inclusions[0].getIncludeDirective(), "b4", "4", "pre11", "pre11", false, true, "pre1", 6, 2, 1, 7, 1);
|
||||
checkInclude(inclusions[0].getIncludeDirective(), "b4b", "4", "pre11", "pre11", false, true, "pre1", 6, 3, 1, 7, 1);
|
||||
assertEquals(0, inclusions[0].getNestedInclusions().length);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -13,6 +13,7 @@
package org.eclipse.cdt.core.model;

import org.eclipse.cdt.core.dom.ICodeReaderFactory;
import org.eclipse.cdt.core.dom.ast.IASTName;
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
import org.eclipse.cdt.core.index.IIndex;
import org.eclipse.cdt.core.parser.CodeReader;
@@ -38,6 +39,13 @@ public abstract class AbstractLanguage extends PlatformObject implements ILangua
*/
public final static int OPTION_ADD_COMMENTS= 2;

/**
* Option for {@link #getASTTranslationUnit(CodeReader, IScannerInfo, ICodeReaderFactory, IIndex, int, IParserLogService)}
* Performance optimization, instructs the parser not to create image-locations.
* When using this option {@link IASTName#getImageLocation()} will always return <code>null</code>.
*/
public final static int OPTION_NO_IMAGE_LOCATIONS= 4;

/**
* @deprecated, throws an UnsupportedOperationException
*/
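Editor's note: a minimal sketch of how a client might combine the new flag with the existing one when requesting an AST. It assumes the getASTTranslationUnit overload referenced in the Javadoc above and that it declares CoreException; the helper class, method and parameter names are illustrative only and not part of this commit.

import org.eclipse.cdt.core.dom.ICodeReaderFactory;
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
import org.eclipse.cdt.core.index.IIndex;
import org.eclipse.cdt.core.model.AbstractLanguage;
import org.eclipse.cdt.core.parser.CodeReader;
import org.eclipse.cdt.core.parser.IParserLogService;
import org.eclipse.cdt.core.parser.IScannerInfo;
import org.eclipse.core.runtime.CoreException;

class FastAstExample {
    // Illustrative helper: request an AST with comments but without
    // image locations, using the option bits defined above.
    static IASTTranslationUnit parseWithoutImageLocations(AbstractLanguage lang,
            CodeReader reader, IScannerInfo scanInfo, ICodeReaderFactory factory,
            IIndex index, IParserLogService log) throws CoreException {
        int options = AbstractLanguage.OPTION_ADD_COMMENTS
                | AbstractLanguage.OPTION_NO_IMAGE_LOCATIONS;
        // getASTTranslationUnit(...) is the overload named in the Javadoc above;
        // with OPTION_NO_IMAGE_LOCATIONS set, IASTName.getImageLocation() returns null.
        return lang.getASTTranslationUnit(reader, scanInfo, factory, index, options, log);
    }
}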
@ -215,7 +215,7 @@ public class AsmModelBuilder implements IContributedModelBuilder {
|
|||
Token token;
|
||||
try {
|
||||
token= fLexer.nextToken();
|
||||
if (token.getType() == Lexer.tEND_OF_INPUT) {
|
||||
if (token.getType() == IToken.tEND_OF_INPUT) {
|
||||
token = null;
|
||||
}
|
||||
} catch (OffsetLimitReachedException exc) {
|
||||
|
|
|
@ -727,7 +727,7 @@ public class ASTSignatureUtil {
|
|||
StringBuffer result = new StringBuffer();
|
||||
result.append(expression.toString());
|
||||
if (expression.getKind() == IASTLiteralExpression.lk_string_literal) {
|
||||
// mstodo- support for old scanner
|
||||
// mstodo- old scanner, remove
|
||||
if (result.length() == 0 || result.charAt(0) != '"') {
|
||||
result.insert(0, '"');
|
||||
result.append('"');
|
||||
|
|
|
@ -0,0 +1,45 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.core.dom.ast;
|
||||
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* <strong>EXPERIMENTAL</strong>. This class or interface has been added as
|
||||
* part of a work in progress. There is no guarantee that this API will
|
||||
* work or that it will remain the same. Please do not use this API without
|
||||
* consulting with the CDT team.
|
||||
* </p>
|
||||
* @since 5.0
|
||||
*
|
||||
* An image location explains how a name made it into the translation unit.
|
||||
*/
|
||||
public interface IASTImageLocation extends IASTFileLocation {
|
||||
|
||||
/**
|
||||
* The image is part of the code that has not been modified by the preprocessor.
|
||||
*/
|
||||
final int REGULAR_CODE= 1;
|
||||
/**
|
||||
* The image is part of a macro definition and was introduced by some macro expansion.
|
||||
*/
|
||||
final int MACRO_DEFINITION= 2;
|
||||
/**
|
||||
* The image is part of an argument of an explicit macro expansion.
|
||||
*/
|
||||
final int ARGUMENT_TO_MACRO_EXPANSION= 3;
|
||||
|
||||
/**
|
||||
* Returns the kind of image-location, one of {@link #REGULAR_CODE}, {@link #MACRO_DEFINITION} or
|
||||
* {@link #ARGUMENT_TO_MACRO_EXPANSION}.
|
||||
*/
|
||||
public int getLocationKind();
|
||||
}
|
|
@@ -62,4 +62,18 @@ public interface IASTName extends IASTNode, IName {
* Determines the current linkage in which the name has to be resolved.
*/
public ILinkage getLinkage();

/**
* Returns the image location for this name or <code>null</code> if the information is not available.
* <p>
* An image location can be computed when the name is either found directly in the code, is (part of)
* an argument to a macro expansion or is (part of) a macro definition found in the source code.
* <p>
* The image location is <code>null</code>, when the name consists of multiple tokens (qualified names)
* and the tokens are not found side by side in the code or if
* the name is the result of a token-paste operation or the name is found in the definition of a
* built-in macro.
* @since 5.0
*/
public IASTImageLocation getImageLocation();
}
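Editor's note: a hedged sketch of how the new accessor might be consumed, based only on methods visible in this diff (getImageLocation, getLocationKind, and the IASTFileLocation accessors exercised in the LocationMap tests). The helper class below is hypothetical.

import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
import org.eclipse.cdt.core.dom.ast.IASTName;

class ImageLocationExample {
    // Reports where the characters of a name physically appear. Callers must be
    // prepared for a null result, e.g. for token-pasted names or built-in macros,
    // as described in the Javadoc above.
    static String describe(IASTName name) {
        IASTImageLocation loc = name.getImageLocation();
        if (loc == null) {
            return "no image location available";
        }
        String kind;
        switch (loc.getLocationKind()) {
        case IASTImageLocation.REGULAR_CODE:
            kind = "regular code"; break;
        case IASTImageLocation.MACRO_DEFINITION:
            kind = "macro definition"; break;
        case IASTImageLocation.ARGUMENT_TO_MACRO_EXPANSION:
            kind = "macro argument"; break;
        default:
            kind = "unknown"; break;
        }
        return kind + " in " + loc.getFileName() + " at offset " + loc.getNodeOffset();
    }
}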
@ -1,12 +1,13 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2004, 2005 IBM Corporation and others.
|
||||
* Copyright (c) 2004, 2007 IBM Corporation and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* IBM - Initial API and implementation
|
||||
* IBM - Initial API and implementation
|
||||
* Markus Schorn (Wind River Systems)
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.core.dom.ast;
|
||||
|
||||
|
@ -46,6 +47,13 @@ public interface IASTPreprocessorMacroDefinition extends
|
|||
*/
|
||||
public String getExpansion();
|
||||
|
||||
/**
|
||||
* Returns the location of the macro expansion, or <code>null</code> if not supported.
|
||||
* For built-in macros the location will always be null.
|
||||
* @since 5.0
|
||||
*/
|
||||
public IASTFileLocation getExpansionLocation();
|
||||
|
||||
/**
|
||||
* Set the macro expansion.
|
||||
*
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.eclipse.cdt.core.dom.ast.IASTName;
|
|||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
|
||||
import org.eclipse.cdt.core.dom.ast.c.CASTVisitor;
|
||||
import org.eclipse.cdt.core.dom.parser.AbstractScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.dom.parser.IScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.dom.parser.ISourceCodeParser;
|
||||
import org.eclipse.cdt.core.index.IIndex;
|
||||
|
@ -39,6 +40,7 @@ import org.eclipse.cdt.core.parser.ParserLanguage;
|
|||
import org.eclipse.cdt.core.parser.ParserMode;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayIntMap;
|
||||
import org.eclipse.cdt.internal.core.dom.parser.c.GNUCSourceParser;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner2.DOMScanner;
|
||||
import org.eclipse.cdt.internal.core.parser.token.KeywordSets;
|
||||
import org.eclipse.cdt.internal.core.pdom.dom.IPDOMLinkageFactory;
|
||||
|
@ -58,7 +60,6 @@ import org.eclipse.core.runtime.CoreException;
|
|||
* </p>
|
||||
*
|
||||
* @see AbstractScannerExtensionConfiguration
|
||||
* @see AbstractCParserExtensionConfiguration
|
||||
*
|
||||
* @since 4.0
|
||||
*/
|
||||
|
@@ -98,6 +99,8 @@ public abstract class AbstractCLanguage extends AbstractLanguage implements ICLa

IScanner scanner= createScanner(reader, scanInfo, codeReaderFactory, log);
scanner.setScanComments((options & OPTION_ADD_COMMENTS) != 0);
scanner.setComputeImageLocations((options & AbstractLanguage.OPTION_NO_IMAGE_LOCATIONS) == 0);

ISourceCodeParser parser= createParser(scanner, log, index, false, options);

// Parse
@@ -149,6 +152,9 @@ public abstract class AbstractCLanguage extends AbstractLanguage implements ICLa
* @return an instance of IScanner
*/
protected IScanner createScanner(CodeReader reader, IScannerInfo scanInfo, ICodeReaderFactory fileCreator, IParserLogService log) {
if (CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"))) { //$NON-NLS-1$
return new CPreprocessor(reader, scanInfo, ParserLanguage.C, log, getScannerExtensionConfiguration(), fileCreator);
}
return new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.C,
log, getScannerExtensionConfiguration(), fileCreator);
}
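Editor's note: a minimal sketch of how a test run might opt into the experimental CPreprocessor, mirroring the property check in createScanner above. The property name "scanner" and the PROP_VALUE constant come from this diff; the concrete value of PROP_VALUE is not shown here, so it is only referenced. On a command line the same switch would be a -Dscanner=<value of CPreprocessor.PROP_VALUE> VM argument.

import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;

class ScannerSwitchExample {
    // Opt in to the CPreprocessor-based scanner for the current JVM;
    // createScanner() falls back to DOMScanner when the property is unset
    // or does not match CPreprocessor.PROP_VALUE.
    static void useExperimentalScanner() {
        System.setProperty("scanner", CPreprocessor.PROP_VALUE);
    }

    static void useDefaultScanner() {
        System.clearProperty("scanner");
    }
}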
@ -39,6 +39,7 @@ import org.eclipse.cdt.core.parser.ParserLanguage;
|
|||
import org.eclipse.cdt.core.parser.ParserMode;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayIntMap;
|
||||
import org.eclipse.cdt.internal.core.dom.parser.cpp.GNUCPPSourceParser;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner2.DOMScanner;
|
||||
import org.eclipse.cdt.internal.core.parser.token.KeywordSets;
|
||||
import org.eclipse.cdt.internal.core.pdom.dom.IPDOMLinkageFactory;
|
||||
|
@ -147,6 +148,9 @@ public abstract class AbstractCPPLanguage extends AbstractLanguage implements IC
|
|||
* @return an instance of IScanner
|
||||
*/
|
||||
protected IScanner createScanner(CodeReader reader, IScannerInfo scanInfo, ICodeReaderFactory fileCreator, IParserLogService log) {
|
||||
if (CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"))) { //$NON-NLS-1$
|
||||
return new CPreprocessor(reader, scanInfo, ParserLanguage.CPP, log, getScannerExtensionConfiguration(), fileCreator);
|
||||
}
|
||||
return new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.CPP,
|
||||
log, getScannerExtensionConfiguration(), fileCreator);
|
||||
}
|
||||
|
|
|
@ -42,6 +42,13 @@ public interface IScanner extends IMacroCollector {
|
|||
*/
|
||||
public void setScanComments(boolean val);
|
||||
|
||||
/**
|
||||
* Turns on/off creation of image locations.
|
||||
* @see IASTName#getImageLocation().
|
||||
* @since 5.0
|
||||
*/
|
||||
public void setComputeImageLocations(boolean val);
|
||||
|
||||
public IMacro addDefinition(char[] key, char[] value);
|
||||
public IMacro addDefinition(char[] name, char[][] params, char[] expansion);
|
||||
public void addDefinition(IMacro macro);
|
||||
|
|
|
@ -242,8 +242,9 @@ public interface IToken {
|
|||
static public final int tBLOCKCOMMENT = 143;
|
||||
/** @deprecated don't use it */
|
||||
static public final int tLAST = 143;
|
||||
static public final int tEND_OF_INPUT= 144;
|
||||
|
||||
int FIRST_RESERVED_IGCCToken = 144;
|
||||
int FIRST_RESERVED_IGCCToken = 150;
|
||||
int LAST_RESERVED_IGCCToken = 199;
|
||||
|
||||
int FIRST_RESERVED_IExtensionToken = 243;
|
||||
|
|
|
@ -14,6 +14,7 @@ package org.eclipse.cdt.internal.core.dom.parser;
|
|||
import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
|
||||
import org.eclipse.cdt.core.dom.ast.ASTVisitor;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
|
||||
|
@ -97,6 +98,17 @@ public abstract class ASTNode implements IASTNode {
|
|||
return locations;
|
||||
}
|
||||
|
||||
public IASTImageLocation getImageLocation() {
|
||||
final IASTTranslationUnit tu= getTranslationUnit();
|
||||
if (tu != null) {
|
||||
ILocationResolver l= (ILocationResolver) tu.getAdapter(ILocationResolver.class);
|
||||
if (l != null) {
|
||||
return l.getImageLocation(offset, length);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String getRawSignature() {
|
||||
final IASTFileLocation floc= getFileLocation();
|
||||
final IASTTranslationUnit ast = getTranslationUnit();
|
||||
|
@ -106,7 +118,7 @@ public abstract class ASTNode implements IASTNode {
|
|||
return new String(lr.getUnpreprocessedSignature(getFileLocation()));
|
||||
}
|
||||
else {
|
||||
// mstodo- support for old location map
|
||||
// mstodo- old location resolver, remove
|
||||
return ast.getUnpreprocessedSignature(getNodeLocations());
|
||||
}
|
||||
}
|
||||
|
@ -120,6 +132,9 @@ public abstract class ASTNode implements IASTNode {
|
|||
public IASTFileLocation getFileLocation() {
|
||||
if( fileLocation != null )
|
||||
return fileLocation;
|
||||
if (offset == 0 && length == 0) {
|
||||
return null;
|
||||
}
|
||||
IASTTranslationUnit ast = getTranslationUnit();
|
||||
if (ast != null) {
|
||||
ILocationResolver lr= (ILocationResolver) ast.getAdapter(ILocationResolver.class);
|
||||
|
|
|
@ -154,7 +154,7 @@ public abstract class AbstractGNUSourceCodeParser implements ISourceCodeParser {
|
|||
|
||||
// Use to create the completion node
|
||||
protected ASTCompletionNode createCompletionNode(IToken token) {
|
||||
if (completionNode == null)
|
||||
if (completionNode == null && token != null)
|
||||
completionNode = new ASTCompletionNode(token, getTranslationUnit());
|
||||
return completionNode;
|
||||
}
|
||||
|
@ -284,6 +284,7 @@ public abstract class AbstractGNUSourceCodeParser implements ISourceCodeParser {
|
|||
OffsetLimitReachedException exception) throws EndOfFileException {
|
||||
if (mode != ParserMode.COMPLETION_PARSE)
|
||||
throw new EndOfFileException();
|
||||
createCompletionNode(exception.getFinalToken());
|
||||
throw exception;
|
||||
}
|
||||
|
||||
|
|
|
@ -409,7 +409,7 @@ public class CASTTranslationUnit extends CASTNode implements
|
|||
return result;
|
||||
}
|
||||
|
||||
// mstodo- support for old location resolver
|
||||
// mstodo- old location resolver remove
|
||||
IASTNode node = null;
|
||||
ASTPreprocessorSelectionResult result = null;
|
||||
int globalOffset = 0;
|
||||
|
|
|
@ -399,7 +399,7 @@ public class CPPASTTranslationUnit extends CPPASTNode implements
|
|||
return result;
|
||||
}
|
||||
|
||||
// mstodo- support for old location resolver
|
||||
// mstodo- old location resolver, remove
|
||||
IASTNode node = null;
|
||||
ASTPreprocessorSelectionResult result = null;
|
||||
int globalOffset = 0;
|
||||
|
|
|
@ -14,6 +14,7 @@ import org.eclipse.cdt.core.dom.ILinkage;
|
|||
import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTCompletionContext;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
|
@ -115,10 +116,6 @@ class ASTBuiltinName extends ASTPreprocessorDefinition {
|
|||
return new IASTNodeLocation[]{fFileLocation};
|
||||
}
|
||||
|
||||
public int getOffset() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public String getRawSignature() {
|
||||
if (fFileLocation == null) {
|
||||
throw new UnsupportedOperationException();
|
||||
|
@ -128,13 +125,27 @@ class ASTBuiltinName extends ASTPreprocessorDefinition {
|
|||
}
|
||||
|
||||
class ASTMacroReferenceName extends ASTPreprocessorName {
|
||||
private ImageLocationInfo fImageLocationInfo;
|
||||
|
||||
public ASTMacroReferenceName(IASTNode parent, int offset, int endOffset, IMacroBinding macro, ImageLocationInfo imgLocationInfo) {
|
||||
super(parent, IASTTranslationUnit.EXPANSION_NAME, offset, endOffset, macro.getNameCharArray(), macro);
|
||||
fImageLocationInfo= imgLocationInfo;
|
||||
}
|
||||
|
||||
public boolean isReference() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// mstodo- image-locations.
|
||||
|
||||
public IASTImageLocation getImageLocation() {
|
||||
if (fImageLocationInfo != null) {
|
||||
IASTTranslationUnit tu= getTranslationUnit();
|
||||
if (tu != null) {
|
||||
LocationMap lr= (LocationMap) tu.getAdapter(LocationMap.class);
|
||||
if (lr != null) {
|
||||
return fImageLocationInfo.createLocation(lr, fImageLocationInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
|
|||
import org.eclipse.cdt.core.dom.ast.IASTComment;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFunctionStyleMacroParameter;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTMacroExpansion;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
|
@ -67,10 +68,6 @@ abstract class ASTPreprocessorNode extends ASTNode {
|
|||
return CharArrayUtils.EMPTY;
|
||||
}
|
||||
|
||||
public IASTNodeLocation[] getNodeLocations() {
|
||||
return super.getNodeLocations();
|
||||
}
|
||||
|
||||
public String getContainingFilename() {
|
||||
if (super.getOffset() == -1) {
|
||||
throw new UnsupportedOperationException();
|
||||
|
@ -228,9 +225,10 @@ class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreproces
|
|||
private final boolean fIsResolved;
|
||||
private final boolean fIsSystemInclude;
|
||||
|
||||
public ASTInclusionStatement(IASTTranslationUnit parent, int startNumber, int nameStartNumber, int nameEndNumber,
|
||||
public ASTInclusionStatement(IASTTranslationUnit parent,
|
||||
int startNumber, int nameStartNumber, int nameEndNumber, int endNumber,
|
||||
char[] headerName, String filePath, boolean userInclude, boolean active) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, nameEndNumber);
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fName= new ASTPreprocessorName(this, IASTPreprocessorIncludeStatement.INCLUDE_NAME, nameStartNumber, nameEndNumber, headerName, null);
|
||||
fPath= filePath == null ? "" : filePath; //$NON-NLS-1$
|
||||
fIsActive= active;
|
||||
|
@ -270,6 +268,7 @@ class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreproces
|
|||
|
||||
class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorObjectStyleMacroDefinition {
|
||||
private final ASTPreprocessorName fName;
|
||||
private final int fExpansionNumber;
|
||||
|
||||
/**
|
||||
* Regular constructor.
|
||||
|
@ -277,6 +276,7 @@ class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorObjectStyl
|
|||
public ASTMacro(IASTTranslationUnit parent, IMacroBinding macro,
|
||||
int startNumber, int nameNumber, int nameEndNumber, int expansionNumber, int endNumber) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
|
||||
fExpansionNumber= expansionNumber;
|
||||
fName= new ASTPreprocessorDefinition(this, IASTPreprocessorMacroDefinition.MACRO_NAME, nameNumber, nameEndNumber, macro.getNameCharArray(), macro);
|
||||
}
|
||||
|
||||
|
@ -287,6 +287,7 @@ class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorObjectStyl
|
|||
public ASTMacro(IASTTranslationUnit parent, IMacroBinding macro, String filename, int nameOffset, int nameEndOffset, int expansionOffset) {
|
||||
super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, -1, -1);
|
||||
fName= new ASTBuiltinName(this, IASTPreprocessorMacroDefinition.MACRO_NAME, filename, nameOffset, nameEndOffset, macro.getNameCharArray(), macro);
|
||||
fExpansionNumber= -1;
|
||||
}
|
||||
|
||||
protected IMacroBinding getMacro() {
|
||||
|
@ -316,6 +317,19 @@ class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorObjectStyl
|
|||
|
||||
public void setExpansion(String exp) {assert false;}
|
||||
public void setName(IASTName name) {assert false;}
|
||||
|
||||
public IASTFileLocation getExpansionLocation() {
|
||||
if (fExpansionNumber >= 0) {
|
||||
IASTTranslationUnit ast = getTranslationUnit();
|
||||
if (ast != null) {
|
||||
ILocationResolver lr= (ILocationResolver) ast.getAdapter(ILocationResolver.class);
|
||||
if (lr != null) {
|
||||
return lr.getMappedFileLocation(fExpansionNumber, getOffset() + getLength() - fExpansionNumber);
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
class ASTMacroParameter extends ASTPreprocessorNode implements IASTFunctionStyleMacroParameter {
|
||||
|
@ -502,6 +516,10 @@ class ASTMacroExpansionLocation implements IASTMacroExpansion {
|
|||
public String toString() {
|
||||
return fContext.getMacroDefinition().getName().toString() + "[" + fOffset + "," + (fOffset+fLength) + ")"; //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
|
||||
}
|
||||
|
||||
public IASTImageLocation getImageLocation() {
|
||||
return fContext.getImageLocation(fOffset, fLength);
|
||||
}
|
||||
}
|
||||
|
||||
class ASTFileLocationForBuiltins implements IASTFileLocation {
|
||||
|
@ -541,3 +559,16 @@ class ASTFileLocationForBuiltins implements IASTFileLocation {
|
|||
}
|
||||
|
||||
|
||||
class ASTImageLocation extends ASTFileLocationForBuiltins implements IASTImageLocation {
|
||||
private final int fKind;
|
||||
|
||||
public ASTImageLocation(int kind, String file, int offset, int length) {
|
||||
super(file, offset, length);
|
||||
fKind= kind;
|
||||
}
|
||||
|
||||
public int getLocationKind() {
|
||||
return fKind;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -168,14 +168,13 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
private final MacroExpander fMacroExpander;
|
||||
|
||||
// configuration
|
||||
final private ParserLanguage fLanguage;
|
||||
final private LexerOptions fLexOptions= new LexerOptions();
|
||||
private boolean fCheckNumbers;
|
||||
final private char[] fAdditionalNumericLiteralSuffixes;
|
||||
final private CharArrayIntMap fKeywords;
|
||||
final private CharArrayIntMap fPPKeywords;
|
||||
final private String[] fIncludePaths;
|
||||
final private String[] fQuoteIncludePaths;
|
||||
private String[][] fPreIncludedFiles= null;
|
||||
|
||||
private int fContentAssistLimit= -1;
|
||||
|
||||
|
@ -193,15 +192,12 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
|
||||
private boolean isCancelled = false;
|
||||
|
||||
private Token fPrefetchedToken;
|
||||
private Token fPrefetchedTokens;
|
||||
private Token fLastToken;
|
||||
private boolean fExpandingMacro;
|
||||
|
||||
public CPreprocessor(CodeReader reader, IScannerInfo info, ParserLanguage language, IParserLogService log,
|
||||
IScannerExtensionConfiguration configuration, ICodeReaderFactory readerFactory) {
|
||||
fLanguage= language;
|
||||
fLog = log;
|
||||
fCheckNumbers= true;
|
||||
fAdditionalNumericLiteralSuffixes= nonNull(configuration.supportAdditionalNumericLiteralSuffixes());
|
||||
fLexOptions.fSupportDollarInitializers= configuration.support$InIdentifiers();
|
||||
fLexOptions.fSupportMinAndMax = configuration.supportMinAndMaxOperators();
|
||||
|
@ -217,28 +213,28 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
fMacroExpander= new MacroExpander(this, fMacroDictionary, fLocationMap, fMacroDefinitionParser, fLexOptions);
|
||||
fCodeReaderFactory= readerFactory;
|
||||
|
||||
setupMacroDictionary(configuration, info);
|
||||
setupMacroDictionary(configuration, info, language);
|
||||
|
||||
final String filePath= new String(reader.filename);
|
||||
fAllIncludedFiles.add(filePath);
|
||||
ILocationCtx ctx= fLocationMap.pushTranslationUnit(filePath, reader.buffer);
|
||||
fRootLexer= new Lexer(reader.buffer, (LexerOptions) fLexOptions.clone(), this, this);
|
||||
fRootContext= fCurrentContext= new ScannerContextFile(ctx, null, fRootLexer);
|
||||
fRootLexer= new Lexer(reader.buffer, fLexOptions, this, this);
|
||||
fRootContext= fCurrentContext= new ScannerContext(ctx, null, fRootLexer);
|
||||
if (info instanceof IExtendedScannerInfo) {
|
||||
final IExtendedScannerInfo einfo= (IExtendedScannerInfo) info;
|
||||
|
||||
// files provided on command line (-imacros, -include)
|
||||
registerPreIncludedFiles(einfo.getMacroFiles(), einfo.getIncludeFiles());
|
||||
fPreIncludedFiles= new String[][] {einfo.getMacroFiles(), einfo.getIncludeFiles()};
|
||||
}
|
||||
}
|
||||
|
||||
public void setComputeImageLocations(boolean val) {
|
||||
fLexOptions.fCreateImageLocations= val;
|
||||
}
|
||||
|
||||
public void setContentAssistMode(int offset) {
|
||||
fContentAssistLimit= offset;
|
||||
fRootLexer.setContentAssistMode(offset);
|
||||
}
|
||||
|
||||
|
||||
// mstodo scanner integration, keywords should be provided directly by the language
|
||||
private void configureKeywords(ParserLanguage language, IScannerExtensionConfiguration configuration) {
|
||||
Keywords.addKeywordsPreprocessor(fPPKeywords);
|
||||
if (language == ParserLanguage.C) {
|
||||
|
@ -279,7 +275,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
return info.getIncludePaths();
|
||||
}
|
||||
|
||||
private void setupMacroDictionary(IScannerExtensionConfiguration config, IScannerInfo info) {
|
||||
private void setupMacroDictionary(IScannerExtensionConfiguration config, IScannerInfo info, ParserLanguage lang) {
|
||||
// built in macros
|
||||
fMacroDictionary.put(__STDC__.getNameCharArray(), __STDC__);
|
||||
fMacroDictionary.put(__FILE__.getNameCharArray(), __FILE__);
|
||||
|
@ -287,7 +283,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
fMacroDictionary.put(__TIME__.getNameCharArray(), __TIME__);
|
||||
fMacroDictionary.put(__LINE__.getNameCharArray(), __LINE__);
|
||||
|
||||
if (fLanguage == ParserLanguage.CPP)
|
||||
if (lang == ParserLanguage.CPP)
|
||||
fMacroDictionary.put(__cplusplus.getNameCharArray(), __cplusplus);
|
||||
else {
|
||||
fMacroDictionary.put(__STDC_HOSTED__.getNameCharArray(), __STDC_HOSTED__);
|
||||
|
@ -322,20 +318,33 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
}
|
||||
}
|
||||
|
||||
private void registerPreIncludedFiles(final String[] macroFiles, final String[] preIncludedFiles) {
|
||||
if (preIncludedFiles != null && preIncludedFiles.length > 0) {
|
||||
final char[] buffer= createSyntheticFile(preIncludedFiles);
|
||||
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, false);
|
||||
fCurrentContext= new ScannerContextFile(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
||||
}
|
||||
|
||||
if (macroFiles != null && macroFiles.length > 0) {
|
||||
final char[] buffer= createSyntheticFile(macroFiles);
|
||||
private void handlePreIncludedFiles() {
|
||||
final String[] imacro= fPreIncludedFiles[0];
|
||||
if (imacro != null && imacro.length > 0) {
|
||||
final char[] buffer= createSyntheticFile(imacro);
|
||||
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, true);
|
||||
fCurrentContext= new ScannerContextMacroFile(this, ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
||||
fCurrentContext= new ScannerContext(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
||||
ScannerContext preCtx= fCurrentContext;
|
||||
try {
|
||||
while(internalFetchToken(true, false, true, preCtx).getType() != IToken.tEND_OF_INPUT) {
|
||||
// just eat the tokens
|
||||
}
|
||||
final ILocationCtx locationCtx = fCurrentContext.getLocationCtx();
|
||||
fLocationMap.popContext(locationCtx);
|
||||
fCurrentContext= fCurrentContext.getParent();
|
||||
assert fCurrentContext == fRootContext;
|
||||
} catch (OffsetLimitReachedException e) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final String[] include= fPreIncludedFiles[1];
|
||||
if (include != null && include.length > 0) {
|
||||
final char[] buffer= createSyntheticFile(include);
|
||||
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, false);
|
||||
fCurrentContext= new ScannerContext(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
||||
}
|
||||
fPreIncludedFiles= null;
|
||||
}
|
||||
|
||||
private char[] createSyntheticFile(String[] files) {
|
||||
int totalLength= 0;
|
||||
final char[] instruction= "#include <".toCharArray(); //$NON-NLS-1$
|
||||
|
@ -356,19 +365,6 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
return buffer;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Check if the given inclusion was already included before.
|
||||
*
|
||||
* @param inclusionData
|
||||
* @return
|
||||
*/
|
||||
// private boolean isRepeatedInclusion(InclusionData inclusionData) {
|
||||
// return includedFiles.containsKey(inclusionData.reader.filename);
|
||||
// }
|
||||
|
||||
public PreprocessorMacro addMacroDefinition(char[] key, char[] value) {
|
||||
final Lexer lex= new Lexer(key, fLexOptions, LEXERLOG_NULL, null);
|
||||
try {
|
||||
|
@ -411,27 +407,78 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
}
|
||||
|
||||
/**
|
||||
* Returns next token for the parser.
|
||||
* @throws OffsetLimitReachedException
|
||||
* Returns the next token from the preprocessor without concatenating string literals.
|
||||
*/
|
||||
private Token fetchToken() throws OffsetLimitReachedException {
|
||||
if (fPreIncludedFiles != null) {
|
||||
handlePreIncludedFiles();
|
||||
}
|
||||
Token t= fPrefetchedTokens;
|
||||
if (t != null) {
|
||||
fPrefetchedTokens= (Token) t.getNext();
|
||||
t.setNext(null);
|
||||
return t;
|
||||
}
|
||||
|
||||
t= internalFetchToken(true, false, true, fRootContext);
|
||||
final int offset= fLocationMap.getSequenceNumberForOffset(t.getOffset());
|
||||
final int endOffset= fLocationMap.getSequenceNumberForOffset(t.getEndOffset());
|
||||
t.setOffset(offset, endOffset);
|
||||
t.setNext(null);
|
||||
return t;
|
||||
}
|
||||
|
||||
private void pushbackToken(Token t) {
|
||||
t.setNext(fPrefetchedTokens);
|
||||
fPrefetchedTokens= t;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns next token for the parser. String literals are not concatenated. When
|
||||
* the end is reached tokens with type {@link IToken#tEND_OF_INPUT}.
|
||||
* @throws OffsetLimitReachedException see {@link Lexer}.
|
||||
*/
|
||||
public IToken nextTokenRaw() throws OffsetLimitReachedException {
|
||||
if (isCancelled) {
|
||||
throw new ParseError(ParseError.ParseErrorKind.TIMEOUT_OR_CANCELLED);
|
||||
}
|
||||
|
||||
Token t1= fetchToken();
|
||||
if (t1.getType() == IToken.tEND_OF_INPUT) {
|
||||
if (fContentAssistLimit >= 0) {
|
||||
int useType= IToken.tCOMPLETION;
|
||||
if (fLastToken != null) {
|
||||
final int lt= fLastToken.getType();
|
||||
if (lt == IToken.tCOMPLETION || lt == IToken.tEOC) {
|
||||
useType= IToken.tEOC;
|
||||
}
|
||||
}
|
||||
int sequenceNumber= fLocationMap.getSequenceNumberForOffset(fContentAssistLimit);
|
||||
t1= new Token(useType, null, sequenceNumber, sequenceNumber);
|
||||
}
|
||||
}
|
||||
if (fLastToken != null) {
|
||||
fLastToken.setNext(t1);
|
||||
}
|
||||
fLastToken= t1;
|
||||
return t1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns next token for the parser. String literals are concatenated.
|
||||
* @throws EndOfFileException when the end of the translation unit has been reached.
|
||||
* @throws OffsetLimitReachedException see {@link Lexer}.
|
||||
*/
|
||||
public IToken nextToken() throws EndOfFileException {
|
||||
if (isCancelled) {
|
||||
throw new ParseError(ParseError.ParseErrorKind.TIMEOUT_OR_CANCELLED);
|
||||
}
|
||||
|
||||
// use prefetched token or get a new one.
|
||||
Token t1= fPrefetchedToken;
|
||||
if (t1 == null) {
|
||||
t1= fetchTokenFromPreprocessor();
|
||||
adjustOffsets(t1);
|
||||
}
|
||||
else {
|
||||
fPrefetchedToken= null;
|
||||
}
|
||||
Token t1= fetchToken();
|
||||
|
||||
final int tt1= t1.getType();
|
||||
switch(tt1) {
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
case IToken.tEND_OF_INPUT:
|
||||
if (fContentAssistLimit < 0) {
|
||||
throw new EndOfFileException();
|
||||
}
|
||||
|
@ -453,8 +500,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
StringBuffer buf= null;
|
||||
int endOffset= 0;
|
||||
loop: while(true) {
|
||||
t2= fetchTokenFromPreprocessor();
|
||||
adjustOffsets(t2);
|
||||
t2= fetchToken();
|
||||
final int tt2= t2.getType();
|
||||
switch(tt2) {
|
||||
case IToken.tLSTRING:
|
||||
|
@ -473,7 +519,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
break loop;
|
||||
}
|
||||
}
|
||||
fPrefetchedToken= t2;
|
||||
pushbackToken(t2);
|
||||
if (buf != null) {
|
||||
char[] image= new char[buf.length() + (isWide ? 3 : 2)];
|
||||
int off= -1;
|
||||
|
@ -494,97 +540,83 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
return t1;
|
||||
}
|
||||
|
||||
private void adjustOffsets(Token t1) {
|
||||
final int offset= fLocationMap.getSequenceNumberForOffset(t1.getOffset());
|
||||
final int endOffset= fLocationMap.getSequenceNumberForOffset(t1.getEndOffset());
|
||||
t1.setOffset(offset, endOffset);
|
||||
t1.setNext(null);
|
||||
}
|
||||
|
||||
private void appendStringContent(StringBuffer buf, Token t1) {
|
||||
final char[] image= t1.getCharImage();
|
||||
final int start= image[0]=='"' ? 1 : 2;
|
||||
buf.append(image, start, image.length-start-1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the current token is a left parenthesis, newlines will be ignored.
|
||||
* No preprocessing is performed.
|
||||
*/
|
||||
boolean findLParenthesisInContext() throws OffsetLimitReachedException {
|
||||
Token t= fCurrentContext.currentLexerToken();
|
||||
while(t.getType() == Lexer.tNEWLINE) {
|
||||
t= fCurrentContext.nextPPToken();
|
||||
}
|
||||
return t.getType() == IToken.tLPAREN;
|
||||
}
|
||||
|
||||
Token fetchTokenFromPreprocessor() throws OffsetLimitReachedException {
|
||||
Token internalFetchToken(final boolean expandMacros, final boolean stopAtNewline,
|
||||
final boolean checkNumbers, final ScannerContext uptoEndOfCtx) throws OffsetLimitReachedException {
|
||||
++fTokenCount;
|
||||
Token ppToken= fCurrentContext.currentLexerToken();
|
||||
while(true) {
|
||||
switch(ppToken.getType()) {
|
||||
case Lexer.tBEFORE_INPUT:
|
||||
ppToken= fCurrentContext.nextPPToken();
|
||||
continue;
|
||||
|
||||
case Lexer.tNEWLINE:
|
||||
if (stopAtNewline) {
|
||||
return ppToken;
|
||||
}
|
||||
ppToken= fCurrentContext.nextPPToken();
|
||||
continue;
|
||||
|
||||
case Lexer.tOTHER_CHARACTER:
|
||||
if (!fExpandingMacro) {
|
||||
handleProblem(IProblem.SCANNER_BAD_CHARACTER, ppToken.getCharImage(),
|
||||
ppToken.getOffset(), ppToken.getEndOffset());
|
||||
ppToken= fCurrentContext.nextPPToken();
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
handleProblem(IProblem.SCANNER_BAD_CHARACTER, ppToken.getCharImage(),
|
||||
ppToken.getOffset(), ppToken.getEndOffset());
|
||||
ppToken= fCurrentContext.nextPPToken();
|
||||
continue;
|
||||
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
final ILocationCtx locationCtx = fCurrentContext.getLocationCtx();
|
||||
if (locationCtx != null) {
|
||||
fLocationMap.popContext(locationCtx);
|
||||
}
|
||||
fCurrentContext= fCurrentContext.getParent();
|
||||
if (fCurrentContext == null) {
|
||||
fCurrentContext= fRootContext;
|
||||
case IToken.tEND_OF_INPUT:
|
||||
if (fCurrentContext == uptoEndOfCtx || uptoEndOfCtx == null) {
|
||||
return ppToken;
|
||||
}
|
||||
final ILocationCtx locationCtx = fCurrentContext.getLocationCtx();
|
||||
fLocationMap.popContext(locationCtx);
|
||||
fCurrentContext= fCurrentContext.getParent();
|
||||
assert fCurrentContext != null;
|
||||
|
||||
ppToken= fCurrentContext.currentLexerToken();
|
||||
continue;
|
||||
|
||||
case IToken.tPOUND:
|
||||
final Lexer lexer= fCurrentContext.getLexerForPPDirective();
|
||||
if (lexer != null) {
|
||||
executeDirective(lexer, ppToken.getOffset());
|
||||
ppToken= fCurrentContext.currentLexerToken();
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case IToken.tPOUND:
|
||||
{
|
||||
final Lexer lexer= fCurrentContext.getLexer();
|
||||
if (lexer != null && lexer.currentTokenIsFirstOnLine()) {
|
||||
executeDirective(lexer, ppToken.getOffset());
|
||||
ppToken= fCurrentContext.currentLexerToken();
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case IToken.tIDENTIFIER:
|
||||
final boolean tryExpansion = !fExpandingMacro && fCurrentContext.expandsMacros();
|
||||
|
||||
fCurrentContext.nextPPToken(); // consume the identifier
|
||||
if (tryExpansion && expandMacro(ppToken)) {
|
||||
ppToken= fCurrentContext.currentLexerToken();
|
||||
continue;
|
||||
}
|
||||
if (expandMacros) {
|
||||
final Lexer lexer= fCurrentContext.getLexer();
|
||||
if (lexer != null && expandMacro(ppToken, lexer, stopAtNewline)) {
|
||||
ppToken= fCurrentContext.currentLexerToken();
|
||||
continue;
|
||||
}
|
||||
|
||||
final char[] name= ppToken.getCharImage();
|
||||
int tokenType = fKeywords.get(name);
|
||||
if (tokenType != fKeywords.undefined) {
|
||||
ppToken.setType(tokenType);
|
||||
}
|
||||
final char[] name= ppToken.getCharImage();
|
||||
int tokenType = fKeywords.get(name);
|
||||
if (tokenType != fKeywords.undefined) {
|
||||
ppToken.setType(tokenType);
|
||||
}
|
||||
}
|
||||
return ppToken;
|
||||
|
||||
case IToken.tINTEGER:
|
||||
if (fCheckNumbers && !fExpandingMacro) {
|
||||
if (checkNumbers) {
|
||||
checkNumber(ppToken, false);
|
||||
}
|
||||
break;
|
||||
|
||||
case IToken.tFLOATINGPT:
|
||||
if (fCheckNumbers) {
|
||||
if (checkNumbers) {
|
||||
checkNumber(ppToken, true);
|
||||
}
|
||||
break;
|
||||
|
@ -822,12 +854,14 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
switch (ident.getType()) {
|
||||
case IToken.tCOMPLETION:
|
||||
lexer.nextToken();
|
||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, ident);
|
||||
Token completionToken= new TokenWithImage(ident.getType(), null,
|
||||
startOffset, ident.getEndOffset(), ("#" + ident.getImage()).toCharArray()); //$NON-NLS-1$
|
||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, completionToken);
|
||||
|
||||
case Lexer.tNEWLINE:
|
||||
return;
|
||||
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
case IToken.tEND_OF_INPUT:
|
||||
case IToken.tINTEGER:
|
||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||
return;
|
||||
|
@ -836,8 +870,8 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
break;
|
||||
|
||||
default:
|
||||
int endOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE, ident.getCharImage(), startOffset, endOffset);
|
||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE, ident.getCharImage(), startOffset, lexer.getLastEndOffset());
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -848,21 +882,10 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
switch (type) {
|
||||
case IPreprocessorDirective.ppImport:
|
||||
case IPreprocessorDirective.ppInclude:
|
||||
if (fExpandingMacro) {
|
||||
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE, name, startOffset, condEndOffset);
|
||||
}
|
||||
else {
|
||||
executeInclude(lexer, startOffset, false, true);
|
||||
}
|
||||
executeInclude(lexer, startOffset, false, true);
|
||||
break;
|
||||
case IPreprocessorDirective.ppInclude_next:
|
||||
if (fExpandingMacro) {
|
||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||
}
|
||||
else {
|
||||
executeInclude(lexer, startOffset, true, true);
|
||||
}
|
||||
executeInclude(lexer, startOffset, true, true);
|
||||
break;
|
||||
case IPreprocessorDirective.ppDefine:
|
||||
executeDefine(lexer, startOffset);
|
||||
|
@ -940,37 +963,25 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
|||
}
|
||||
|
||||
private void executeInclude(final Lexer lexer, int poundOffset, boolean include_next, boolean active) throws OffsetLimitReachedException {
|
||||
char[] headerName= null;
|
||||
boolean userInclude= true;
|
||||
|
||||
lexer.setInsideIncludeDirective(true);
|
||||
final Token header= lexer.nextToken();
|
||||
lexer.setInsideIncludeDirective(false);
|
||||
final int nameOffset= header.getOffset();
|
||||
int nameEndOffset= header.getEndOffset();
|
||||
int endOffset;
|
||||
|
||||
int condEndOffset= header.getEndOffset();
|
||||
final int[] nameOffsets= new int[] {header.getOffset(), condEndOffset};
|
||||
char[] headerName= null;
|
||||
boolean userInclude= true;
|
||||
|
||||
switch(header.getType()) {
|
||||
case Lexer.tSYSTEM_HEADER_NAME:
|
||||
userInclude= false;
|
||||
char[] image= header.getCharImage();
|
||||
headerName= new char[image.length-2];
|
||||
System.arraycopy(image, 1, headerName, 0, headerName.length);
|
||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||
endOffset= lexer.currentToken().getEndOffset();
|
||||
headerName = extractHeaderName(header.getCharImage(), '<', '>', nameOffsets);
|
||||
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
break;

case Lexer.tQUOTE_HEADER_NAME:
image= header.getCharImage();
if (image.length <= 2) {
headerName= CharArrayUtils.EMPTY;
}
else {
headerName= new char[image.length-2];
System.arraycopy(image, 1, headerName, 0, headerName.length);
}
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
endOffset= lexer.currentToken().getEndOffset();
headerName = extractHeaderName(header.getCharImage(), '"', '"', nameOffsets);
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
break;

case IToken.tCOMPLETION:

@@ -978,17 +989,15 @@ public class CPreprocessor implements ILexerLog, IScanner {

case IToken.tIDENTIFIER:
TokenList tl= new TokenList();
nameEndOffset= getPreprocessedTokensOfLine(lexer, tl);
endOffset= lexer.currentToken().getEndOffset();
condEndOffset= nameOffsets[1]= getTokensWithinPPDirective(lexer, false, tl);
Token t= tl.first();
if (t != null) {
switch(t.getType()) {
case IToken.tSTRING:
image= t.getCharImage();
headerName= new char[image.length-2];
System.arraycopy(image, 1, headerName, 0, headerName.length);
headerName = extractHeaderName(t.getCharImage(), '"', '"', new int[]{0,0});
break;
case IToken.tLT:
userInclude= false;
boolean complete= false;
StringBuffer buf= new StringBuffer();
t= (Token) t.getNext();

@@ -1000,23 +1009,22 @@ public class CPreprocessor implements ILexerLog, IScanner {
buf.append(t.getImage());
t= (Token) t.getNext();
}
if (!complete && fContentAssistLimit >= 0 && fCurrentContext == fRootContext) {
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, null);
if (complete) {
headerName= new char[buf.length()];
buf.getChars(0, buf.length(), headerName, 0);
}
headerName= new char[buf.length()];
buf.getChars(0, buf.length(), headerName, 0);
}
}
break;

default:
endOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
break;
}
if (headerName == null || headerName.length==0) {
if (active) {
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE,
lexer.getInputChars(poundOffset, endOffset), poundOffset, nameEndOffset);
lexer.getInputChars(poundOffset, condEndOffset), poundOffset, condEndOffset);
}
return;
}

@@ -1031,13 +1039,13 @@ public class CPreprocessor implements ILexerLog, IScanner {
if (!isCircularInclusion(path)) {
reported= true;
fAllIncludedFiles.add(path);
ILocationCtx ctx= fLocationMap.pushInclusion(poundOffset, nameOffset, nameEndOffset, endOffset, reader.buffer, path, headerName, userInclude);
ScannerContextFile fctx= new ScannerContextFile(ctx, fCurrentContext, new Lexer(reader.buffer, fLexOptions, this, this));
ILocationCtx ctx= fLocationMap.pushInclusion(poundOffset, nameOffsets[0], nameOffsets[1], condEndOffset, reader.buffer, path, headerName, userInclude);
ScannerContext fctx= new ScannerContext(ctx, fCurrentContext, new Lexer(reader.buffer, fLexOptions, this, this));
fCurrentContext= fctx;
}
}
else {
handleProblem(IProblem.PREPROCESSOR_INCLUSION_NOT_FOUND, headerName, poundOffset, nameEndOffset);
handleProblem(IProblem.PREPROCESSOR_INCLUSION_NOT_FOUND, headerName, poundOffset, condEndOffset);
}
}
else {

@@ -1060,9 +1068,27 @@ public class CPreprocessor implements ILexerLog, IScanner {
}
}
if (!reported) {
fLocationMap.encounterPoundInclude(poundOffset, nameOffset, nameEndOffset, endOffset, headerName, path, !userInclude, active);
fLocationMap.encounterPoundInclude(poundOffset, nameOffsets[0], nameOffsets[1], condEndOffset, headerName, path, userInclude, active);
}
}

private char[] extractHeaderName(final char[] image, final char startDelim, final char endDelim, int[] offsets) {
char[] headerName;
int start= 0;
int length= image.length;
if (length > 0 && image[length-1] == endDelim) {
length--;
offsets[1]--;
if (length > 0 && image[0] == startDelim) {
offsets[0]++;
start++;
length--;
}
}
headerName= new char[length];
System.arraycopy(image, start, headerName, 0, length);
return headerName;
}
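A minimal stand-alone sketch (not CDT code) of what the extractHeaderName() above does: strip the surrounding delimiters from a header-name image, but only when the closing delimiter is actually present, so malformed directives keep their raw image. The class and method names below are made up for illustration.

class HeaderNameSketch {
    static String extractHeaderName(String image, char startDelim, char endDelim) {
        int start = 0, end = image.length();
        if (end > 0 && image.charAt(end - 1) == endDelim) {
            end--;
            // strip the opening delimiter only after the closing one was found
            if (end > 0 && image.charAt(0) == startDelim) {
                start++;
            }
        }
        return image.substring(start, end);
    }
    public static void main(String[] args) {
        System.out.println(extractHeaderName("<stdio.h>", '<', '>'));   // stdio.h
        System.out.println(extractHeaderName("\"foo.h\"", '"', '"'));   // foo.h
        System.out.println(extractHeaderName("\"broken.h", '"', '"'));  // "broken.h (unchanged)
    }
}
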
private boolean isCircularInclusion(String filename) {
ILocationCtx checkContext= fCurrentContext.getLocationCtx();

@@ -1141,7 +1167,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
boolean isActive= false;
TokenList condition= new TokenList();
final int condOffset= lexer.nextToken().getOffset();
final int condEndOffset= getPreprocessedTokensOfLine(lexer, condition);
final int condEndOffset= getTokensWithinPPDirective(lexer, true, condition);
final int endOffset= lexer.currentToken().getEndOffset();

if (condition.first() == null) {

@@ -1165,29 +1191,39 @@ public class CPreprocessor implements ILexerLog, IScanner {

/**
* Runs the preprocessor on the rest of the line, storing the tokens in the holder supplied.
* Macro expansion is reported to the location map.
* Returns the end-offset of the last token used from the input.
* Macro expansion is reported to the location map.
* In case isCondition is set to <code>true</code>, identifiers with image 'defined' are
* converted to the defined-token and its argument is not macro expanded.
* Returns the end-offset of the last token that was consumed.
*/
private int getPreprocessedTokensOfLine(Lexer lexer, TokenList result) throws OffsetLimitReachedException {
final ScannerContext sctx= fCurrentContext;
final ScannerContextPPDirective ppdCtx= new ScannerContextPPDirective(lexer, true);
fCurrentContext= ppdCtx;
boolean cn= fCheckNumbers;
fCheckNumbers= false;
try {
Token t= fetchTokenFromPreprocessor();
while (t.getType() != Lexer.tEND_OF_INPUT) {
result.append(t);
t= fetchTokenFromPreprocessor();
}
// make sure an exception is thrown if we are running content assist at the end of the line
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
return ppdCtx.getLastEndOffset();
}
finally {
fCurrentContext= sctx;
fCheckNumbers= cn;
}
private int getTokensWithinPPDirective(Lexer lexer, boolean isCondition, TokenList result) throws OffsetLimitReachedException {
final ScannerContext scannerCtx= fCurrentContext;
boolean expandMacros= true;
loop: while(true) {
Token t= internalFetchToken(expandMacros, true, false, scannerCtx);
switch(t.getType()) {
case IToken.tEND_OF_INPUT:
case IToken.tCOMPLETION:
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE); // make sure the exception is thrown.
break loop;
case Lexer.tNEWLINE:
break loop;
case IToken.tIDENTIFIER:
if (isCondition && CharArrayUtils.equals(Keywords.cDEFINED, t.getCharImage())) {
t.setType(CPreprocessor.tDEFINED);
expandMacros= false;
}
break;
case IToken.tLPAREN:
break;
default:
expandMacros= true;
break;
}
result.append(t);
}
// make sure an exception is thrown if we are running content assist at the end of the line
return lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
}
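The switch above suspends macro expansion while the operand of 'defined' is read, because expanding the operand first would destroy the name the operator is supposed to test. A minimal stand-alone sketch of why that matters (not CDT code; the helper names are made up):

import java.util.*;

class DefinedOperatorSketch {
    // tokens have the form: defined X  or  defined ( X )
    static boolean evalDefined(List<String> tokens, Map<String, String> macros) {
        String operand = tokens.get(1).equals("(") ? tokens.get(2) : tokens.get(1);
        // the operand is looked up by name; it must NOT be replaced by its definition first
        return macros.containsKey(operand);
    }

    public static void main(String[] args) {
        Map<String, String> macros = new HashMap<>();
        macros.put("FOO", "0");                                   // #define FOO 0
        List<String> cond = Arrays.asList("defined", "(", "FOO", ")");
        // expanding FOO to 0 beforehand would turn the condition into defined(0) and break it
        System.out.println(evalDefined(cond, macros));            // true
    }
}
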
private void skipOverConditionalCode(final Lexer lexer, boolean takeElseBranch) throws OffsetLimitReachedException {

@@ -1262,7 +1298,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
int condOffset= lexer.nextToken().getOffset();
if (nesting == 0 && takeElseBranch) {
TokenList condition= new TokenList();
condEndOffset= getPreprocessedTokensOfLine(lexer, condition);
condEndOffset= getTokensWithinPPDirective(lexer, true, condition);
if (condition.first() != null) {
try {
isActive= fExpressionEvaluator.evaluate(condition, fMacroDictionary);
@@ -1314,13 +1350,12 @@ public class CPreprocessor implements ILexerLog, IScanner {
* Checks whether the identifier causes a macro expansion. May advance the current lexer
* to check for the opening bracket succeeding the identifier.
* <p>
* If applicable the macro is expanded and the resulting tokens are put into a scanner context.
* If applicable the macro is expanded and the resulting tokens are put onto a new context.
* @param identifier the token where macro expansion may occur.
* @param multiline whether we are allowed to check subsequent lines for macro arguments.
* @return
* @throws OffsetLimitReachedException
* @param lexer the input for the expansion.
* @param stopAtNewline whether or not tokens to be read are limited to the current line.
*/
private boolean expandMacro(final Token identifier) throws OffsetLimitReachedException {
private boolean expandMacro(final Token identifier, Lexer lexer, boolean stopAtNewline) throws OffsetLimitReachedException {
final char[] name= identifier.getCharImage();
PreprocessorMacro macro= (PreprocessorMacro) fMacroDictionary.get(name);
if (macro == null) {

@@ -1328,23 +1363,25 @@ public class CPreprocessor implements ILexerLog, IScanner {
}

if (macro instanceof FunctionStyleMacro) {
if (!findLParenthesisInContext()) {
return false;
}
Token t= lexer.currentToken();
if (!stopAtNewline) {
while(t.getType() == Lexer.tNEWLINE) {
t= lexer.nextToken();
}
}
if (t.getType() != IToken.tLPAREN) {
return false;
}
}
fExpandingMacro= true;
final boolean contentAssist = fContentAssistLimit>=0 && fCurrentContext == fRootContext;
TokenList replacement= new TokenList();
final int endOffset= fMacroExpander.expand(macro, identifier, contentAssist, replacement);
fExpandingMacro= false;

final ImageLocationInfo[] ili= fMacroExpander.createImageLocations(replacement);
final IASTName[] expansions= fMacroExpander.createImplicitExpansions();
final int length= fMacroExpander.adjustOffsets(replacement);
TokenList replacement= fMacroExpander.expand(lexer, stopAtNewline, macro, identifier, contentAssist);
final IASTName[] expansions= fMacroExpander.clearImplicitExpansions();
final ImageLocationInfo[] ili= fMacroExpander.clearImageLocationInfos();
final Token last= replacement.last();
final int length= last == null ? 0 : last.getEndOffset();
ILocationCtx ctx= fLocationMap.pushMacroExpansion(
identifier.getOffset(), identifier.getEndOffset(), endOffset, length, macro, expansions, ili);
fCurrentContext= new ScannerContextMacroExpansion(ctx, fCurrentContext, replacement);

identifier.getOffset(), identifier.getEndOffset(), lexer.getLastEndOffset(), length, macro, expansions, ili);
fCurrentContext= new ScannerContext(ctx, fCurrentContext, replacement);
return true;
}
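The loop above implements the standard lookahead rule for function-style macros: the name only triggers an expansion if the next non-newline token is an opening parenthesis, which may sit on a later line unless the expansion is confined to a directive. A stand-alone sketch of that rule (not CDT code; names are hypothetical):

import java.util.*;

class FunctionMacroLookaheadSketch {
    static boolean startsInvocation(List<String> tokens, int idxAfterName, boolean stopAtNewline) {
        int i = idxAfterName;
        // outside of preprocessor directives the argument list may start on a later line
        while (!stopAtNewline && i < tokens.size() && tokens.get(i).equals("\n")) {
            i++;
        }
        return i < tokens.size() && tokens.get(i).equals("(");
    }

    public static void main(String[] args) {
        // the macro name used as a plain identifier, e.g. "int x = MAX;" -> no expansion
        System.out.println(startsInvocation(Arrays.asList(";"), 0, false));            // false
        // the '(' appears on the next line -> the expansion is still triggered
        System.out.println(startsInvocation(Arrays.asList("\n", "(", "1"), 0, false)); // true
    }
}
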
@@ -1355,7 +1392,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
return null;
}

// stuff to be removed
// old scanner, remove this.
public CharArrayObjectMap getRealDefinitions() {
throw new UnsupportedOperationException();
}

@@ -1366,7 +1403,6 @@ public class CPreprocessor implements ILexerLog, IScanner {
throw new UnsupportedOperationException();
}
public void setScanComments(boolean val) {
throw new UnsupportedOperationException();
}
public char[] getMainFilename() {
throw new UnsupportedOperationException();

@@ -272,7 +272,7 @@ class ExpressionEvaluator {
private void consume() {
fTokens= (Token) fTokens.getNext();
if (fTokens == null) {
fTokens= new Token(Lexer.tEND_OF_INPUT, null, 0, 0);
fTokens= new Token(IToken.tEND_OF_INPUT, null, 0, 0);
}
}

@ -13,6 +13,7 @@ package org.eclipse.cdt.internal.core.parser.scanner;
|
|||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTComment;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
|
@ -74,7 +75,7 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
|
|||
|
||||
/**
|
||||
* @see IASTTranslationUnit#getContainingFilename()
|
||||
* mstodo- scanner removal should be renamed
|
||||
* mstodo- old location resolver, should be renamed
|
||||
*/
|
||||
String getContainingFilename(int sequenceNumber);
|
||||
|
||||
|
@ -108,6 +109,11 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
|
|||
*/
|
||||
IASTNodeLocation[] getLocations(int sequenceNumber, int length);
|
||||
|
||||
/**
|
||||
* @see IASTName#getImageLocation()
|
||||
*/
|
||||
IASTImageLocation getImageLocation(int offset, int length);
|
||||
|
||||
/**
|
||||
* Returns the sequence-number for the given file-path and offset, or <code>-1</code> if this file
|
||||
* is not part of the translation-unit.
|
||||
|
@ -122,7 +128,9 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
|
|||
char[] getUnpreprocessedSignature(IASTFileLocation loc);
|
||||
|
||||
/**
|
||||
* Returns a preprocessor node surrounding the given range, or <code>null</code>.
|
||||
* Returns a preprocessor node surrounding the given range, or <code>null</code>. The result is either a
|
||||
* preprocessing directive ({@link IASTPreprocessorStatement}) or a name contained therein {@link IASTName} or
|
||||
* a macro expansion ({@link IASTName}).
|
||||
*/
|
||||
IASTNode findSurroundingPreprocessorNode(int sequenceNumber, int length);
|
||||
}
|
||||
|
|
|
@ -10,14 +10,82 @@
|
|||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
|
||||
|
||||
/**
|
||||
* Information needed for computing image-locations. An image location exists for a name and describes where the name
|
||||
* came from. This can be: source code, macro-expansion, parameter to macro-expansion or synthetic.
|
||||
*
|
||||
* @since 5.0
|
||||
*/
|
||||
public class ImageLocationInfo {
|
||||
public abstract class ImageLocationInfo {
|
||||
|
||||
public static final ImageLocationInfo[] NO_LOCATION_INFOS= {};
|
||||
|
||||
int fTokenOffsetInExpansion= -1;
|
||||
|
||||
public abstract IASTImageLocation createLocation(LocationMap lm, ImageLocationInfo upto);
|
||||
public abstract boolean canConcatenate(ImageLocationInfo info);
|
||||
|
||||
public static class MacroImageLocationInfo extends ImageLocationInfo {
|
||||
private final ObjectStyleMacro fMacro;
|
||||
private final int fOffset;
|
||||
private final int fEndOffset;
|
||||
public MacroImageLocationInfo(ObjectStyleMacro macro, int offset, int endOffset) {
|
||||
fMacro= macro;
|
||||
fOffset= offset;
|
||||
fEndOffset= endOffset;
|
||||
}
|
||||
|
||||
public IASTImageLocation createLocation(LocationMap lm, ImageLocationInfo upto) {
|
||||
IASTPreprocessorMacroDefinition md= lm.getMacroDefinition(fMacro);
|
||||
IASTFileLocation expansionLoc= md.getExpansionLocation();
|
||||
if (expansionLoc != null) {
|
||||
final int length= ((MacroImageLocationInfo) upto).fEndOffset - fOffset;
|
||||
return new ASTImageLocation(IASTImageLocation.MACRO_DEFINITION,
|
||||
expansionLoc.getFileName(), expansionLoc.getNodeOffset() + fOffset, length);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean canConcatenate(ImageLocationInfo info) {
|
||||
if (info instanceof MacroImageLocationInfo) {
|
||||
MacroImageLocationInfo mli= (MacroImageLocationInfo) info;
|
||||
if (mli.fMacro == fMacro && fEndOffset <= mli.fOffset) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static class ParameterImageLocationInfo extends ImageLocationInfo {
|
||||
public int fSequenceNumber;
|
||||
public int fSequenceEndNumber;
|
||||
public ParameterImageLocationInfo(int sequenceNumber, int sequenceEndNumber) {
|
||||
fSequenceNumber= sequenceNumber;
|
||||
fSequenceEndNumber= sequenceEndNumber;
|
||||
}
|
||||
public IASTImageLocation createLocation(LocationMap lm, ImageLocationInfo upto) {
|
||||
int sequenceEnd= ((ParameterImageLocationInfo) upto).fSequenceEndNumber;
|
||||
IASTFileLocation loc= lm.getMappedFileLocation(fSequenceNumber, sequenceEnd-fSequenceNumber);
|
||||
if (loc != null) {
|
||||
return new ASTImageLocation(IASTImageLocation.ARGUMENT_TO_MACRO_EXPANSION,
|
||||
loc.getFileName(), loc.getNodeOffset(), loc.getNodeLength());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean canConcatenate(ImageLocationInfo info) {
|
||||
if (info instanceof ParameterImageLocationInfo) {
|
||||
ParameterImageLocationInfo pli= (ParameterImageLocationInfo) info;
|
||||
if (fSequenceEndNumber <= pli.fSequenceNumber) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
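A minimal stand-alone sketch (not CDT code) of the contract behind canConcatenate() in the ImageLocationInfo hierarchy above: two per-token records can be merged into one image location only if they stem from the same source (here modelled as the same macro definition) and the second record starts at or after the end of the first, so the combined range stays contiguous and ordered. The class below is illustrative only.

class RangeInfoSketch {
    final String source; final int offset; final int endOffset;
    RangeInfoSketch(String source, int offset, int endOffset) {
        this.source = source; this.offset = offset; this.endOffset = endOffset;
    }
    boolean canConcatenate(RangeInfoSketch next) {
        return source.equals(next.source) && endOffset <= next.offset;
    }
    RangeInfoSketch concatenate(RangeInfoSketch next) {   // merged range covering both tokens
        return new RangeInfoSketch(source, offset, next.endOffset);
    }
    public static void main(String[] args) {
        RangeInfoSketch a = new RangeInfoSketch("MACRO_DEF", 2, 5);
        RangeInfoSketch b = new RangeInfoSketch("MACRO_DEF", 6, 9);
        System.out.println(a.canConcatenate(b));          // true
        System.out.println(a.concatenate(b).endOffset);   // 9
    }
}
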
@ -22,7 +22,7 @@ import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
|||
* Returns preprocessor tokens.
|
||||
* <p>
|
||||
* In addition to the preprocessor tokens the following tokens may also be returned:
|
||||
* {@link #tBEFORE_INPUT}, {@link #tEND_OF_INPUT}, {@link IToken#tCOMPLETION}.
|
||||
* {@link #tBEFORE_INPUT}, {@link IToken#tEND_OF_INPUT}, {@link IToken#tCOMPLETION}.
|
||||
* <p>
|
||||
* Number literals are split up into {@link IToken#tINTEGER} and {@link IToken#tFLOATINGPT}.
|
||||
* No checks are done on the number literals.
|
||||
|
@ -38,10 +38,9 @@ import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
|||
final public class Lexer {
|
||||
public static final int tBEFORE_INPUT = IToken.FIRST_RESERVED_SCANNER;
|
||||
public static final int tNEWLINE = IToken.FIRST_RESERVED_SCANNER + 1;
|
||||
public static final int tEND_OF_INPUT = IToken.FIRST_RESERVED_SCANNER + 2;
|
||||
public static final int tQUOTE_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 3;
|
||||
public static final int tSYSTEM_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 4;
|
||||
public static final int tOTHER_CHARACTER = IToken.FIRST_RESERVED_SCANNER + 5;
|
||||
public static final int tQUOTE_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 2;
|
||||
public static final int tSYSTEM_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 3;
|
||||
public static final int tOTHER_CHARACTER = IToken.FIRST_RESERVED_SCANNER + 4;
|
||||
|
||||
private static final int END_OF_INPUT = -1;
|
||||
private static final int ORIGIN_LEXER = OffsetLimitReachedException.ORIGIN_LEXER;
|
||||
|
@ -49,7 +48,7 @@ final public class Lexer {
|
|||
public final static class LexerOptions implements Cloneable {
|
||||
public boolean fSupportDollarInitializers= true;
|
||||
public boolean fSupportMinAndMax= true;
|
||||
public boolean fSupportContentAssist= false;
|
||||
public boolean fCreateImageLocations= true;
|
||||
|
||||
public Object clone() {
|
||||
try {
|
||||
|
@ -62,6 +61,7 @@ final public class Lexer {
|
|||
|
||||
// configuration
|
||||
private final LexerOptions fOptions;
|
||||
private boolean fSupportContentAssist= false;
|
||||
private final ILexerLog fLog;
|
||||
private final Object fSource;
|
||||
|
||||
|
@ -77,12 +77,12 @@ final public class Lexer {
|
|||
|
||||
private boolean fInsideIncludeDirective= false;
|
||||
private Token fToken;
|
||||
private Token fLastToken;
|
||||
|
||||
// for the few cases where we have to lookahead more than one character
|
||||
private int fMarkOffset;
|
||||
private int fMarkEndOffset;
|
||||
private int fMarkPrefetchedChar;
|
||||
private boolean fFirstTokenAfterNewline= true;
|
||||
|
||||
|
||||
public Lexer(char[] input, LexerOptions options, ILexerLog log, Object source) {
|
||||
|
@ -96,7 +96,7 @@ final public class Lexer {
|
|||
fOptions= options;
|
||||
fLog= log;
|
||||
fSource= source;
|
||||
fToken= new Token(tBEFORE_INPUT, source, start, start);
|
||||
fLastToken= fToken= new Token(tBEFORE_INPUT, source, start, start);
|
||||
nextCharPhase3();
|
||||
}
|
||||
|
||||
|
@ -111,8 +111,8 @@ final public class Lexer {
|
|||
* Resets the lexer to the first char and prepares for content-assist mode.
|
||||
*/
|
||||
public void setContentAssistMode(int offset) {
|
||||
fOptions.fSupportContentAssist= true;
|
||||
fLimit= Math.min(fLimit, fInput.length);
|
||||
fSupportContentAssist= true;
|
||||
fLimit= Math.min(offset, fInput.length);
|
||||
// re-initialize
|
||||
fOffset= fEndOffset= fStart;
|
||||
nextCharPhase3();
|
||||
|
@ -132,25 +132,32 @@ final public class Lexer {
|
|||
public Token currentToken() {
|
||||
return fToken;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the endoffset of the token before the current one.
|
||||
*/
|
||||
public int getLastEndOffset() {
|
||||
return fLastToken.getEndOffset();
|
||||
}
|
||||
|
||||
/**
|
||||
* Advances to the next token, skipping whitespace other than newline.
|
||||
* @throws OffsetLimitReachedException when completion is requested in a literal or a header-name.
|
||||
*/
|
||||
public Token nextToken() throws OffsetLimitReachedException {
|
||||
final int t= fToken.getType();
|
||||
fFirstTokenAfterNewline= t == tNEWLINE || t == tBEFORE_INPUT;
|
||||
fLastToken= fToken;
|
||||
return fToken= fetchToken();
|
||||
}
|
||||
|
||||
public boolean currentTokenIsFirstOnLine() {
|
||||
return fFirstTokenAfterNewline;
|
||||
final int type= fLastToken.getType();
|
||||
return type == tNEWLINE || type == tBEFORE_INPUT;
|
||||
}
|
||||
|
||||
/**
|
||||
* Advances to the next newline.
|
||||
* @return the end offset of the last token before the newline or the start of the newline
|
||||
* if there were no other tokens.
|
||||
* Advances to the next newline or the end of input. The newline will not be consumed. If the
|
||||
* current token is a newline no action is performed.
|
||||
* Returns the end offset of the last token before the newline.
|
||||
* @param origin parameter for the {@link OffsetLimitReachedException} when it has to be thrown.
|
||||
* @since 5.0
|
||||
*/
|
||||
|
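A simplified stand-alone sketch (not CDT code) of the consumeLine() contract documented above: advance to the next newline without consuming it and report the end offset of the last token that preceded it; boundary cases of the real lexer (content assist, end of input) are ignored here and all names are made up.

class ConsumeLineSketch {
    static final class Tok {
        final String image; final int endOffset;
        Tok(String image, int endOffset) { this.image = image; this.endOffset = endOffset; }
    }

    // returns the end offset of the last token before the newline; the newline stays current
    static int consumeLine(Tok[] toks, int start) {
        int lastEnd = toks[start].endOffset;
        for (int i = start; i < toks.length && !toks[i].image.equals("\n"); i++) {
            lastEnd = toks[i].endOffset;
        }
        return lastEnd;
    }

    public static void main(String[] args) {
        Tok[] line = { new Tok("#", 1), new Tok("warning", 9), new Tok("text", 14), new Tok("\n", 15) };
        System.out.println(consumeLine(line, 0));   // 14, the end offset of "text"
    }
}
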
@ -160,17 +167,22 @@ final public class Lexer {
|
|||
while(true) {
|
||||
switch(t.getType()) {
|
||||
case IToken.tCOMPLETION:
|
||||
if (lt != null) {
|
||||
fLastToken= lt;
|
||||
}
|
||||
fToken= t;
|
||||
throw new OffsetLimitReachedException(origin, t);
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
fToken= t;
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
throw new OffsetLimitReachedException(origin, lt);
|
||||
case IToken.tEND_OF_INPUT:
|
||||
if (fSupportContentAssist) {
|
||||
throw new OffsetLimitReachedException(origin, t);
|
||||
}
|
||||
return lt != null ? lt.getEndOffset() : t.getOffset();
|
||||
// no break;
|
||||
case Lexer.tNEWLINE:
|
||||
fToken= t;
|
||||
return lt != null ? lt.getEndOffset() : t.getOffset();
|
||||
if (lt != null) {
|
||||
fLastToken= lt;
|
||||
}
|
||||
return getLastEndOffset();
|
||||
}
|
||||
lt= t;
|
||||
t= fetchToken();
|
||||
|
@ -219,7 +231,7 @@ final public class Lexer {
|
|||
|
||||
switch(c) {
|
||||
case END_OF_INPUT:
|
||||
fToken= newToken(Lexer.tEND_OF_INPUT, start);
|
||||
fLastToken= fToken= newToken(IToken.tEND_OF_INPUT, start);
|
||||
return fToken;
|
||||
case '\n':
|
||||
haveNL= true;
|
||||
|
@ -266,7 +278,7 @@ final public class Lexer {
|
|||
}
|
||||
restorePhase3();
|
||||
}
|
||||
fFirstTokenAfterNewline= true;
|
||||
fLastToken= new Token(tNEWLINE, fSource, 0, start); // offset not significant
|
||||
fToken= newDigraphToken(IToken.tPOUND, start);
|
||||
return fToken;
|
||||
}
|
||||
|
@ -275,7 +287,7 @@ final public class Lexer {
|
|||
|
||||
case '#':
|
||||
if (hadNL && d != '#') {
|
||||
fFirstTokenAfterNewline= true;
|
||||
fLastToken= new Token(tNEWLINE, fSource, 0, start); // offset not significant
|
||||
fToken= newToken(IToken.tPOUND, start);
|
||||
return fToken;
|
||||
}
|
||||
|
@ -297,7 +309,7 @@ final public class Lexer {
|
|||
final int d= nextCharPhase3();
|
||||
switch(c) {
|
||||
case END_OF_INPUT:
|
||||
return newToken(Lexer.tEND_OF_INPUT, start);
|
||||
return newToken(IToken.tEND_OF_INPUT, start);
|
||||
case '\n':
|
||||
fInsideIncludeDirective= false;
|
||||
return newToken(Lexer.tNEWLINE, start);
|
||||
|
@ -633,7 +645,7 @@ final public class Lexer {
|
|||
loop: while (!done) {
|
||||
switch (c) {
|
||||
case END_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
if (fSupportContentAssist) {
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
||||
newToken((expectQuotes ? tQUOTE_HEADER_NAME : tSYSTEM_HEADER_NAME), start, length));
|
||||
}
|
||||
|
@ -695,7 +707,7 @@ final public class Lexer {
|
|||
loop: while (!done) {
|
||||
switch(c) {
|
||||
case END_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
if (fSupportContentAssist) {
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLSTRING : IToken.tSTRING, start, length));
|
||||
}
|
||||
// no break;
|
||||
|
@ -731,7 +743,7 @@ final public class Lexer {
|
|||
loop: while (!done) {
|
||||
switch(c) {
|
||||
case END_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
if (fSupportContentAssist) {
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLCHAR : IToken.tCHAR, start, length));
|
||||
}
|
||||
// no break;
|
||||
|
@ -788,7 +800,7 @@ final public class Lexer {
|
|||
break;
|
||||
|
||||
case END_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
if (fSupportContentAssist) {
|
||||
tokenKind= IToken.tCOMPLETION;
|
||||
}
|
||||
isPartOfIdentifier= false;
|
||||
|
@ -878,8 +890,8 @@ final public class Lexer {
|
|||
}
|
||||
break;
|
||||
|
||||
case tEND_OF_INPUT:
|
||||
if (fOptions.fSupportContentAssist) {
|
||||
case END_OF_INPUT:
|
||||
if (fSupportContentAssist) {
|
||||
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
||||
newToken((isFloat ? IToken.tFLOATINGPT : IToken.tINTEGER), start, length));
|
||||
}
|
||||
|
|
|
@ -90,6 +90,13 @@ abstract class LocationCtx implements ILocationCtx {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the macro-expansion surrounding or augmenting the given range, or <code>null</code>.
|
||||
*/
|
||||
public LocationCtxMacroExpansion findSurroundingMacroExpansion(int sequenceNumber, int length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the minimal file location containing the specified sequence number range, assuming
|
||||
* that it is contained in this context.
|
||||
|
|
|
@ -83,17 +83,26 @@ class LocationCtxContainer extends LocationCtx {
|
|||
|
||||
public final LocationCtx findSurroundingContext(int sequenceNumber, int length) {
|
||||
int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
|
||||
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber);
|
||||
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber, false);
|
||||
if (child != null && child.fSequenceNumber+child.getSequenceLength() > testEnd) {
|
||||
return child.findSurroundingContext(sequenceNumber, length);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public final LocationCtxMacroExpansion findSurroundingMacroExpansion(int sequenceNumber, int length) {
|
||||
int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
|
||||
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber, true);
|
||||
if (child != null && child.fSequenceNumber+child.getSequenceLength() > testEnd) {
|
||||
return child.findSurroundingMacroExpansion(sequenceNumber, length);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public IASTFileLocation findMappedFileLocation(int sequenceNumber, int length) {
|
||||
// try to delegate to a child.
|
||||
int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
|
||||
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber);
|
||||
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber, false);
|
||||
if (child != null && child.fSequenceNumber+child.getSequenceLength() > testEnd) {
|
||||
return child.findMappedFileLocation(sequenceNumber, length);
|
||||
}
|
||||
|
@ -103,7 +112,7 @@ class LocationCtxContainer extends LocationCtx {
|
|||
public boolean collectLocations(int sequenceNumber, final int length, ArrayList locations) {
|
||||
final int endSequenceNumber= sequenceNumber+length;
|
||||
if (fChildren != null) {
|
||||
int childIdx= Math.max(0, findChildIdxLessOrEqualThan(sequenceNumber));
|
||||
int childIdx= Math.max(0, findChildIdxLessOrEqualThan(sequenceNumber, false));
|
||||
for (; childIdx < fChildren.size(); childIdx++) {
|
||||
final LocationCtx child= (LocationCtx) fChildren.get(childIdx);
|
||||
|
||||
|
@ -154,7 +163,7 @@ class LocationCtxContainer extends LocationCtx {
|
|||
return null;
|
||||
}
|
||||
|
||||
final int findChildIdxLessOrEqualThan(int sequenceNumber) {
|
||||
final int findChildIdxLessOrEqualThan(int sequenceNumber, boolean beforeReplacedChars) {
|
||||
if (fChildren == null) {
|
||||
return -1;
|
||||
}
|
||||
|
@ -163,7 +172,11 @@ class LocationCtxContainer extends LocationCtx {
|
|||
while (upper > lower) {
|
||||
int middle= (upper+lower)/2;
|
||||
LocationCtx child= (LocationCtx) fChildren.get(middle);
|
||||
if (child.fSequenceNumber <= sequenceNumber) {
|
||||
int childSequenceNumber= child.fSequenceNumber;
|
||||
if (beforeReplacedChars) {
|
||||
childSequenceNumber-= child.fEndOffsetInParent-child.fOffsetInParent;
|
||||
}
|
||||
if (childSequenceNumber <= sequenceNumber) {
|
||||
lower= middle+1;
|
||||
}
|
||||
else {
|
||||
|
@ -173,8 +186,8 @@ class LocationCtxContainer extends LocationCtx {
|
|||
return lower-1;
|
||||
}
|
||||
|
||||
final LocationCtx findChildLessOrEqualThan(final int sequenceNumber) {
|
||||
final int idx= findChildIdxLessOrEqualThan(sequenceNumber);
|
||||
final LocationCtx findChildLessOrEqualThan(final int sequenceNumber, boolean beforeReplacedChars) {
|
||||
final int idx= findChildIdxLessOrEqualThan(sequenceNumber, beforeReplacedChars);
|
||||
return idx >= 0 ? (LocationCtx) fChildren.get(idx) : null;
|
||||
}
|
||||
|
||||
|
|
|
@ -41,9 +41,9 @@ class LocationCtxFile extends LocationCtxContainer {
|
|||
// try to delegate to a child.
|
||||
final int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
|
||||
final int sequenceEnd= sequenceNumber+length;
|
||||
final LocationCtx child1= findChildLessOrEqualThan(sequenceNumber);
|
||||
final LocationCtx child2= testEnd == sequenceNumber ? child1 : findChildLessOrEqualThan(testEnd);
|
||||
|
||||
final LocationCtx child1= findChildLessOrEqualThan(sequenceNumber, false);
|
||||
final LocationCtx child2= testEnd == sequenceNumber ? child1 : findChildLessOrEqualThan(testEnd, false);
|
||||
|
||||
if (child1 == child2 && child1 != null && child1.fSequenceNumber + child1.getSequenceLength() > testEnd) {
|
||||
return child1.findMappedFileLocation(sequenceNumber, length);
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@ package org.eclipse.cdt.internal.core.parser.scanner;
|
|||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
|
||||
import org.eclipse.cdt.core.dom.ast.IMacroBinding;
|
||||
|
||||
|
@ -22,7 +23,8 @@ import org.eclipse.cdt.core.dom.ast.IMacroBinding;
|
|||
class LocationCtxMacroExpansion extends LocationCtx {
|
||||
private final LocationMap fLocationMap;
|
||||
private final int fLength;
|
||||
private ASTMacroReferenceName fName;
|
||||
private final ASTMacroReferenceName fName;
|
||||
private final ImageLocationInfo[] fLocationInfos;
|
||||
|
||||
public LocationCtxMacroExpansion(LocationMap map, LocationCtxContainer parent, int parentOffset, int parentEndOffset,
|
||||
int sequenceNumber, int length, ImageLocationInfo[] imageLocations, ASTMacroReferenceName expansion) {
|
||||
|
@ -30,6 +32,7 @@ class LocationCtxMacroExpansion extends LocationCtx {
|
|||
fLocationMap= map;
|
||||
fLength= length;
|
||||
fName= expansion;
|
||||
fLocationInfos= imageLocations;
|
||||
}
|
||||
|
||||
public int getSequenceLength() {
|
||||
|
@ -49,9 +52,48 @@ class LocationCtxMacroExpansion extends LocationCtx {
|
|||
return false;
|
||||
}
|
||||
|
||||
public ASTMacroReferenceName getMacroReference() {
|
||||
return fName;
|
||||
}
|
||||
|
||||
public IASTPreprocessorMacroDefinition getMacroDefinition() {
|
||||
return fLocationMap.getMacroDefinition((IMacroBinding) fName.getBinding());
|
||||
}
|
||||
|
||||
public LocationCtxMacroExpansion findSurroundingMacroExpansion(int sequenceNumber, int length) {
|
||||
return this;
|
||||
}
|
||||
|
||||
public IASTImageLocation getImageLocation(int offset, int length) {
|
||||
if (length == 0) {
|
||||
return null;
|
||||
}
|
||||
final int end= offset+length;
|
||||
int nextToCheck= offset;
|
||||
ImageLocationInfo firstInfo= null;
|
||||
ImageLocationInfo lastInfo= null;
|
||||
for (int i = 0; i < fLocationInfos.length; i++) {
|
||||
ImageLocationInfo info = fLocationInfos[i];
|
||||
if (info.fTokenOffsetInExpansion == nextToCheck) {
|
||||
if (lastInfo == null) {
|
||||
firstInfo= lastInfo= info;
|
||||
}
|
||||
else if (lastInfo.canConcatenate(info)) {
|
||||
lastInfo= info;
|
||||
}
|
||||
else {
|
||||
return null;
|
||||
}
|
||||
if (++nextToCheck == end) {
|
||||
return firstInfo.createLocation(fLocationMap, lastInfo);
|
||||
}
|
||||
}
|
||||
else if (info.fTokenOffsetInExpansion > nextToCheck) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -18,6 +18,7 @@ import java.util.List;
|
|||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTComment;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
|
@ -120,7 +121,7 @@ public class LocationMap implements ILocationResolver {
|
|||
int nameEndNumber= getSequenceNumberForOffset(nameEndOffset);
|
||||
int endNumber= getSequenceNumberForOffset(endOffset);
|
||||
final ASTInclusionStatement inclusionStatement=
|
||||
new ASTInclusionStatement(fTranslationUnit, startNumber, nameNumber, nameEndNumber, name, filename, userInclude, true);
|
||||
new ASTInclusionStatement(fTranslationUnit, startNumber, nameNumber, nameEndNumber, endNumber, name, filename, userInclude, true);
|
||||
fDirectives.add(inclusionStatement);
|
||||
fCurrentContext= new LocationCtxFile((LocationCtxContainer) fCurrentContext, filename, buffer, startOffset, endOffset, endNumber, inclusionStatement);
|
||||
fLastChildInsertionOffset= 0;
|
||||
|
@ -208,8 +209,8 @@ public class LocationMap implements ILocationResolver {
|
|||
startOffset= getSequenceNumberForOffset(startOffset);
|
||||
nameOffset= getSequenceNumberForOffset(nameOffset);
|
||||
nameEndOffset= getSequenceNumberForOffset(nameEndOffset);
|
||||
// not using endOffset, compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTInclusionStatement(fTranslationUnit, startOffset, nameOffset, nameEndOffset, name, filename, userInclude, active));
|
||||
endOffset= getSequenceNumberForOffset(endOffset);
|
||||
fDirectives.add(new ASTInclusionStatement(fTranslationUnit, startOffset, nameOffset, nameEndOffset, endOffset, name, filename, userInclude, active));
|
||||
}
|
||||
|
||||
public void encounteredComment(int offset, int endOffset, boolean isBlockComment) {
|
||||
|
@ -365,6 +366,25 @@ public class LocationMap implements ILocationResolver {
|
|||
fRootContext.collectLocations(sequenceNumber, length, result);
|
||||
return (IASTNodeLocation[]) result.toArray(new IASTNodeLocation[result.size()]);
|
||||
}
|
||||
|
||||
public IASTImageLocation getImageLocation(int sequenceNumber, int length) {
|
||||
ArrayList result= new ArrayList();
|
||||
fRootContext.collectLocations(sequenceNumber, length, result);
|
||||
if (result.size() != 1) {
|
||||
return null;
|
||||
}
|
||||
IASTNodeLocation loc= (IASTNodeLocation) result.get(0);
|
||||
if (loc instanceof IASTFileLocation) {
|
||||
IASTFileLocation floc= (IASTFileLocation) loc;
|
||||
return new ASTImageLocation(IASTImageLocation.REGULAR_CODE,
|
||||
floc.getFileName(), floc.getNodeOffset(), floc.getNodeLength());
|
||||
}
|
||||
if (loc instanceof ASTMacroExpansionLocation) {
|
||||
ASTMacroExpansionLocation mel= (ASTMacroExpansionLocation) loc;
|
||||
return mel.getImageLocation();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public IASTNode findSurroundingPreprocessorNode(int sequenceNumber, int length) {
|
||||
int lower=0;
|
||||
|
@ -386,6 +406,17 @@ public class LocationMap implements ILocationResolver {
|
|||
upper= middle-1;
|
||||
}
|
||||
}
|
||||
// search for a macro-expansion
|
||||
LocationCtxMacroExpansion ctx= fRootContext.findSurroundingMacroExpansion(sequenceNumber, length);
|
||||
if (ctx != null) {
|
||||
ASTMacroReferenceName candidate= ctx.getMacroReference();
|
||||
final int candSequenceNumber = candidate.getOffset();
|
||||
final int candEndSequenceNumber = candSequenceNumber + candidate.getLength();
|
||||
if (candSequenceNumber <= sequenceNumber && sequenceNumber + length <= candEndSequenceNumber) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -497,11 +528,12 @@ public class LocationMap implements ILocationResolver {
|
|||
public ASTPreprocessorSelectionResult getPreprocessorNode(String path, int offset, int length) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
// mstodo- locations
|
||||
|
||||
// mstodo- old location resolver
|
||||
public char[] getUnpreprocessedSignature(IASTNodeLocation[] locations) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
// mstodo- scanner removal
|
||||
// mstodo- old location resolver
|
||||
public IASTName[] getMacroExpansions() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
|
|
@ -99,7 +99,7 @@ class MacroDefinitionParser {
|
|||
final char[] nameChars = name.getCharImage();
|
||||
final char[][] paramList= parseParamList(lexer, name);
|
||||
final Token replacementToken = lexer.currentToken();
|
||||
if (replacementToken.getType() != Lexer.tEND_OF_INPUT) {
|
||||
if (replacementToken.getType() != IToken.tEND_OF_INPUT) {
|
||||
throw new InvalidMacroDefinitionException(nameChars, replacementToken.getOffset(), replacementToken.getEndOffset());
|
||||
}
|
||||
|
||||
|
@ -222,7 +222,7 @@ class MacroDefinitionParser {
|
|||
switch(candidate.getType()) {
|
||||
case IToken.tCOMPLETION:
|
||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, candidate);
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
case IToken.tEND_OF_INPUT:
|
||||
case Lexer.tNEWLINE:
|
||||
break loop;
|
||||
case IToken.tIDENTIFIER:
|
||||
|
|
|
@ -19,6 +19,8 @@ import org.eclipse.cdt.core.parser.IToken;
|
|||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayObjectMap;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.ImageLocationInfo.MacroImageLocationInfo;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.ImageLocationInfo.ParameterImageLocationInfo;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.Lexer.LexerOptions;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.MacroDefinitionParser.TokenParameterReference;
|
||||
|
||||
|
@ -27,7 +29,8 @@ import org.eclipse.cdt.internal.core.parser.scanner.MacroDefinitionParser.TokenP
|
|||
* @since 5.0
|
||||
*/
|
||||
public class MacroExpander {
|
||||
private static final int ORIGIN = OffsetLimitReachedException.ORIGIN_MACRO_EXPANSION;
|
||||
private static final int ORIGIN = OffsetLimitReachedException.ORIGIN_MACRO_EXPANSION;
|
||||
private static final Token END_TOKEN = new Token(IToken.tEND_OF_INPUT, null, 0, 0);
|
||||
|
||||
/**
|
||||
* Marks the beginning and the end of the scope of a macro expansion. Necessary to properly
|
||||
|
@ -66,17 +69,25 @@ public class MacroExpander {
|
|||
* Combines a list of tokens with the preprocessor to form the input for macro expansion.
|
||||
*/
|
||||
private class TokenSource extends TokenList {
|
||||
private boolean fUseCpp;
|
||||
private final Lexer fLexer;
|
||||
private final boolean fStopAtNewline;
|
||||
|
||||
public TokenSource(boolean useCpp) {
|
||||
fUseCpp= useCpp;
|
||||
public TokenSource(Lexer lexer, boolean stopAtNewline) {
|
||||
fLexer= lexer;
|
||||
fStopAtNewline= stopAtNewline;
|
||||
}
|
||||
|
||||
public Token fetchFirst() throws OffsetLimitReachedException {
|
||||
Token t= removeFirst();
|
||||
if (t == null && fUseCpp) {
|
||||
t= fCpp.fetchTokenFromPreprocessor();
|
||||
fEndOffset= t.getEndOffset();
|
||||
if (t == null && fLexer != null) {
|
||||
t= fLexer.currentToken();
|
||||
if (fStopAtNewline && t.getType() == Lexer.tNEWLINE) {
|
||||
t= END_TOKEN;
|
||||
}
|
||||
else {
|
||||
fEndOffset= t.getEndOffset();
|
||||
fLexer.nextToken();
|
||||
}
|
||||
}
|
||||
return t;
|
||||
}
|
||||
|
@ -98,36 +109,46 @@ public class MacroExpander {
|
|||
t= (Token) t.getNext();
|
||||
}
|
||||
|
||||
if (fUseCpp) {
|
||||
return fCpp.findLParenthesisInContext();
|
||||
}
|
||||
if (fLexer != null) {
|
||||
t= fLexer.currentToken();
|
||||
if (!fStopAtNewline) {
|
||||
while(t.getType() == Lexer.tNEWLINE) {
|
||||
t= fLexer.nextToken();
|
||||
}
|
||||
}
|
||||
return t.getType() == IToken.tLPAREN;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private final ILexerLog fLog;
|
||||
private final MacroDefinitionParser fDefinitionParser;
|
||||
private final CharArrayObjectMap fDictionary;
|
||||
private final LocationMap fLocationMap;
|
||||
private final CPreprocessor fCpp;
|
||||
private final LexerOptions fLexOptions;
|
||||
private int fEndOffset;
|
||||
private ArrayList fImplicitMacroExpansions= new ArrayList();
|
||||
private ArrayList fImageLocationInfos= new ArrayList();
|
||||
private boolean fCompletionMode;
|
||||
private int fStartOffset;
|
||||
private int fEndOffset;
|
||||
|
||||
public MacroExpander(CPreprocessor cpp, CharArrayObjectMap dict, LocationMap locationMap, MacroDefinitionParser mdp, LexerOptions lexOptions) {
|
||||
fCpp= cpp;
|
||||
public MacroExpander(ILexerLog log, CharArrayObjectMap dict, LocationMap locationMap, MacroDefinitionParser mdp, LexerOptions lexOptions) {
|
||||
fDictionary= dict;
|
||||
fLocationMap= locationMap;
|
||||
fDefinitionParser= mdp;
|
||||
fLexOptions= lexOptions;
|
||||
fLog= log;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expects that the identifier has been consumed, stores the result in the list provided and returns the
|
||||
* end offset of the last token read from the preprocessor input.
|
||||
* Expects that the identifier has been consumed, stores the result in the list provided.
|
||||
*/
|
||||
public int expand(PreprocessorMacro macro, Token identifier, boolean completionMode, TokenList expansion) throws OffsetLimitReachedException {
|
||||
public TokenList expand(Lexer lexer, boolean stopAtNewline, PreprocessorMacro macro, Token identifier, boolean completionMode) throws OffsetLimitReachedException {
|
||||
fImplicitMacroExpansions.clear();
|
||||
fImageLocationInfos.clear();
|
||||
|
||||
fStartOffset= identifier.getOffset();
|
||||
fEndOffset= identifier.getEndOffset();
|
||||
fCompletionMode= completionMode;
|
||||
|
@ -135,13 +156,15 @@ public class MacroExpander {
|
|||
IdentityHashMap forbidden= new IdentityHashMap();
|
||||
|
||||
// setup input sequence
|
||||
TokenSource input= new TokenSource(true);
|
||||
TokenSource input= new TokenSource(lexer, stopAtNewline);
|
||||
TokenList firstExpansion= new TokenList();
|
||||
expandOne(identifier, macro, forbidden, input, firstExpansion);
|
||||
input.prepend(firstExpansion);
|
||||
|
||||
expandAll(input, forbidden, expansion);
|
||||
return fEndOffset;
|
||||
TokenList result= expandAll(input, forbidden);
|
||||
postProcessTokens(result);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
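The expand() method above follows the usual rescanning scheme: the identifier is expanded once, the replacement is prepended to the remaining input, and everything is scanned again, while the 'forbidden' map blocks recursive self-expansion. A stand-alone sketch of that scheme (not CDT code; object-like macros only, and unlike the real preprocessor a macro is never re-enabled here):

import java.util.*;

class RescanSketch {
    static List<String> expandAll(Deque<String> input, Map<String, List<String>> macros,
            Set<String> forbidden) {
        List<String> result = new ArrayList<>();
        while (!input.isEmpty()) {
            String t = input.removeFirst();
            List<String> repl = macros.get(t);
            if (repl != null && !forbidden.contains(t)) {
                forbidden.add(t);                        // block self-expansion
                for (int i = repl.size() - 1; i >= 0; i--) {
                    input.addFirst(repl.get(i));         // prepend replacement and rescan
                }
            } else {
                result.add(t);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, List<String>> macros = new HashMap<>();
        macros.put("A", Arrays.asList("1", "+", "B"));
        macros.put("B", Arrays.asList("A"));             // refers back to A
        Deque<String> input = new ArrayDeque<>(Arrays.asList("A", ";"));
        // A -> 1 + B -> 1 + A, but A is blocked inside its own expansion, so it stays
        System.out.println(expandAll(input, macros, new HashSet<>())); // [1, +, A, ;]
    }
}
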
/**
|
||||
|
@ -161,8 +184,7 @@ public class MacroExpander {
|
|||
for (int i = 0; i < argInputs.length; i++) {
|
||||
final TokenSource argInput = argInputs[i];
|
||||
clonedArgs[i]= argInput.cloneTokens();
|
||||
final TokenList expandedArg= new TokenList();
|
||||
expandAll(argInput, forbidden, expandedArg);
|
||||
final TokenList expandedArg= expandAll(argInput, forbidden);
|
||||
expandedArgs[i]= expandedArg;
|
||||
}
|
||||
replaceArgs(macro, clonedArgs, expandedArgs, result);
|
||||
|
@ -174,7 +196,8 @@ public class MacroExpander {
|
|||
return lastConsumed;
|
||||
}
|
||||
|
||||
private void expandAll(TokenSource input, IdentityHashMap forbidden, TokenList result) throws OffsetLimitReachedException {
|
||||
private TokenList expandAll(TokenSource input, IdentityHashMap forbidden) throws OffsetLimitReachedException {
|
||||
final TokenList result= new TokenList();
|
||||
Token l= null;
|
||||
Token t= input.removeFirst();
|
||||
while(t != null) {
|
||||
|
@ -194,8 +217,11 @@ public class MacroExpander {
|
|||
result.append(t);
|
||||
}
|
||||
else {
|
||||
// mstodo- image location
|
||||
fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, null));
|
||||
ImageLocationInfo info= null;
|
||||
if (fLexOptions.fCreateImageLocations) {
|
||||
info = createImageLocationInfo(t);
|
||||
}
|
||||
fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, info));
|
||||
|
||||
TokenList replacement= new TokenList();
|
||||
|
||||
|
@ -213,6 +239,20 @@ public class MacroExpander {
|
|||
l= t;
|
||||
t= input.removeFirst();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private ImageLocationInfo createImageLocationInfo(Token t) {
|
||||
final Object s= t.fSource;
|
||||
if (s instanceof ObjectStyleMacro) {
|
||||
return new MacroImageLocationInfo((ObjectStyleMacro) s, fEndOffset, fEndOffset);
|
||||
}
|
||||
else if (s instanceof CPreprocessor) {
|
||||
int sequenceNumber= fLocationMap.getSequenceNumberForOffset(t.getOffset());
|
||||
int sequenceEndNumber= fLocationMap.getSequenceNumberForOffset(t.getEndOffset());
|
||||
return new ParameterImageLocationInfo(sequenceNumber, sequenceEndNumber);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private void addSpacemarker(Token l, Token t, TokenList target) {
|
||||
|
@ -242,7 +282,7 @@ public class MacroExpander {
|
|||
int idx= 0;
|
||||
int nesting= -1;
|
||||
for (int i = 0; i < result.length; i++) {
|
||||
result[i]= new TokenSource(false);
|
||||
result[i]= new TokenSource(null, false);
|
||||
}
|
||||
|
||||
boolean complete= false;
|
||||
|
@ -256,7 +296,7 @@ public class MacroExpander {
|
|||
}
|
||||
lastToken= t;
|
||||
switch(t.getType()) {
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
case IToken.tEND_OF_INPUT:
|
||||
assert nesting >= 0;
|
||||
if (fCompletionMode) {
|
||||
throw new OffsetLimitReachedException(ORIGIN, null);
|
||||
|
@ -266,7 +306,6 @@ public class MacroExpander {
|
|||
throw new OffsetLimitReachedException(ORIGIN, t);
|
||||
|
||||
case Lexer.tNEWLINE:
|
||||
assert false; // we should not get any newlines from macros or the preprocessor.
|
||||
continue loop;
|
||||
|
||||
case IToken.tLPAREN:
|
||||
|
@ -336,7 +375,7 @@ public class MacroExpander {
|
|||
}
|
||||
|
||||
private void handleProblem(int problemID, char[] arg) {
|
||||
fCpp.handleProblem(problemID, arg, fStartOffset, fEndOffset);
|
||||
fLog.handleProblem(problemID, arg, fStartOffset, fEndOffset);
|
||||
}
|
||||
|
||||
private void replaceArgs(PreprocessorMacro macro, TokenList[] args, TokenList[] expandedArgs, TokenList result) {
|
||||
|
@ -545,7 +584,7 @@ public class MacroExpander {
|
|||
try {
|
||||
Token t1= lex.nextToken();
|
||||
Token t2= lex.nextToken();
|
||||
if (t1.getType() != Lexer.tEND_OF_INPUT && t2.getType() == Lexer.tEND_OF_INPUT) {
|
||||
if (t1.getType() != IToken.tEND_OF_INPUT && t2.getType() == IToken.tEND_OF_INPUT) {
|
||||
t1.setOffset(arg1.getOffset(), arg2.getEndOffset());
|
||||
return t1;
|
||||
}
|
||||
|
@ -604,26 +643,44 @@ public class MacroExpander {
|
|||
}
|
||||
}
|
||||
|
||||
public IASTName[] createImplicitExpansions() {
|
||||
public IASTName[] clearImplicitExpansions() {
|
||||
IASTName[] result= (IASTName[]) fImplicitMacroExpansions.toArray(new IASTName[fImplicitMacroExpansions.size()]);
|
||||
fImplicitMacroExpansions.clear();
|
||||
return result;
|
||||
}
|
||||
|
||||
public ImageLocationInfo[] createImageLocations(TokenList replacement) {
|
||||
// mstodo- image locations
|
||||
return ImageLocationInfo.NO_LOCATION_INFOS;
|
||||
public ImageLocationInfo[] clearImageLocationInfos() {
|
||||
ImageLocationInfo[] result= (ImageLocationInfo[]) fImageLocationInfos.toArray(new ImageLocationInfo[fImageLocationInfos.size()]);
|
||||
fImageLocationInfos.clear();
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
public int adjustOffsets(TokenList replacement) {
|
||||
private void postProcessTokens(TokenList replacement) {
|
||||
final boolean createImageLocations= fLexOptions.fCreateImageLocations;
|
||||
int offset= 0;
|
||||
Token l= null;
|
||||
for (Token t= replacement.first(); t!=null; t= (Token) t.getNext()) {
|
||||
switch(t.getType()) {
|
||||
case CPreprocessor.tEXPANDED_IDENTIFIER:
|
||||
t.setType(IToken.tIDENTIFIER);
|
||||
if (createImageLocations) {
|
||||
ImageLocationInfo info= createImageLocationInfo(t);
|
||||
if (info != null) {
|
||||
info.fTokenOffsetInExpansion= offset;
|
||||
fImageLocationInfos.add(info);
|
||||
}
|
||||
}
|
||||
break;
|
||||
case IToken.tIDENTIFIER:
|
||||
if (createImageLocations) {
|
||||
ImageLocationInfo info= createImageLocationInfo(t);
|
||||
if (info != null) {
|
||||
info.fTokenOffsetInExpansion= offset;
|
||||
fImageLocationInfos.add(info);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case CPreprocessor.tSCOPE_MARKER:
|
||||
case CPreprocessor.tSPACE:
|
||||
case CPreprocessor.tNOSPACE:
|
||||
|
@ -633,6 +690,5 @@ public class MacroExpander {
|
|||
t.setOffset(offset, ++offset);
|
||||
l= t;
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -217,7 +217,7 @@ class FunctionStyleMacro extends ObjectStyleMacro {
char[][] result= new char[length][];
System.arraycopy(fParamList, 0, result, 0, length-1);
if (fHasVarArgs == VAARGS) {
result[length-1] = Keywords.cVA_ARGS;
result[length-1]= Keywords.cpELLIPSIS;
}
else {
final char[] param= fParamList[length-1];

@ -10,6 +10,9 @@
|
|||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
|
||||
|
||||
|
@ -18,7 +21,9 @@ import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
|||
* Represents part of the input to the preprocessor. This may be a file or the result of a macro expansion.
|
||||
* @since 5.0
|
||||
*/
|
||||
abstract class ScannerContext {
|
||||
final class ScannerContext {
|
||||
private static final Token END_TOKEN = new Token(IToken.tEND_OF_INPUT, null, 0, 0);
|
||||
|
||||
public static final Integer BRANCH_IF = new Integer(0);
|
||||
public static final Integer BRANCH_ELIF = new Integer(1);
|
||||
public static final Integer BRANCH_ELSE = new Integer(2);
|
||||
|
@ -26,16 +31,28 @@ abstract class ScannerContext {
|
|||
|
||||
private final ILocationCtx fLocationCtx;
|
||||
private final ScannerContext fParent;
|
||||
|
||||
private final Lexer fLexer;
|
||||
private ArrayList fBranches= null;
|
||||
|
||||
private Token fTokens;
|
||||
|
||||
/**
|
||||
* @param ctx
|
||||
* @param parent context to be used after this context is done.
|
||||
*/
|
||||
public ScannerContext(ILocationCtx ctx, ScannerContext parent) {
|
||||
public ScannerContext(ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
|
||||
fLocationCtx= ctx;
|
||||
fParent= parent;
|
||||
fLexer= lexer;
|
||||
}
|
||||
|
||||
public ScannerContext(ILocationCtx ctx, ScannerContext parent, TokenList tokens) {
|
||||
fLocationCtx= ctx;
|
||||
fParent= parent;
|
||||
fLexer= null;
|
||||
fTokens= tokens.first();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the location context associated with this scanner context.
|
||||
*/
|
||||
|
@ -52,37 +69,70 @@ abstract class ScannerContext {
|
|||
}
|
||||
|
||||
/**
|
||||
* Tests whether or not the current identifier of this context are subject to macro-expansion.
|
||||
* Returns the lexer for this context.
|
||||
*/
|
||||
public boolean expandsMacros() {
|
||||
return true;
|
||||
public final Lexer getLexer() {
|
||||
return fLexer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the lexer for a preprocessing directive or <code>null</code> if the current
|
||||
* token is not the start of a preprocessing directive.
|
||||
* <p>
|
||||
* The current token starts a directive, whenever the context supports directives,
|
||||
* and the current token is a pound that occurs as the first token on the line.
|
||||
*/
|
||||
public abstract Lexer getLexerForPPDirective();
|
||||
|
||||
/**
|
||||
* Needs to be called whenever we change over to another branch of conditional
|
||||
* compilation. Returns whether the change is legal at this point or not.
|
||||
*/
|
||||
public abstract boolean changeBranch(Integer state);
|
||||
|
||||
public final boolean changeBranch(Integer branchKind) {
|
||||
if (fBranches == null) {
|
||||
fBranches= new ArrayList();
|
||||
}
|
||||
|
||||
// an if starts a new conditional construct
|
||||
if (branchKind == BRANCH_IF) {
|
||||
fBranches.add(branchKind);
|
||||
return true;
|
||||
}
|
||||
// if we are not inside of an conditional there shouldn't be an #else, #elsif or #end
|
||||
final int pos= fBranches.size()-1;
|
||||
if (pos < 0) {
|
||||
return false;
|
||||
}
|
||||
// an #end just pops one construct.
|
||||
if (branchKind == BRANCH_END) {
|
||||
fBranches.remove(pos);
|
||||
return true;
|
||||
}
|
||||
// #elsif or #else cannot appear after another #else
|
||||
if (fBranches.get(pos) == BRANCH_ELSE) {
|
||||
return false;
|
||||
}
|
||||
// overwrite #if, #elsif with #elsif or #else
|
||||
fBranches.set(pos, branchKind);
|
||||
return true;
|
||||
}
|
||||
|
||||
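A stand-alone sketch (not CDT code) walking through the branch bookkeeping implemented by changeBranch() above: #if pushes an entry, #endif pops one, #elif and #else overwrite the top entry, and an #elif or #else is rejected once the top entry is already an #else or when no conditional is open. All names below are made up.

import java.util.*;

class BranchTrackingSketch {
    enum Branch { IF, ELIF, ELSE, END }

    static boolean changeBranch(Deque<Branch> open, Branch kind) {
        if (kind == Branch.IF) { open.push(kind); return true; }
        if (open.isEmpty()) return false;               // #elif/#else/#endif without #if
        if (kind == Branch.END) { open.pop(); return true; }
        if (open.peek() == Branch.ELSE) return false;   // nothing may follow #else
        open.pop(); open.push(kind);                    // #elif/#else replaces #if/#elif
        return true;
    }

    public static void main(String[] args) {
        Deque<Branch> open = new ArrayDeque<>();
        System.out.println(changeBranch(open, Branch.IF));    // true   (#if)
        System.out.println(changeBranch(open, Branch.ELIF));  // true   (#elif)
        System.out.println(changeBranch(open, Branch.ELSE));  // true   (#else)
        System.out.println(changeBranch(open, Branch.ELIF));  // false  (#elif after #else)
        System.out.println(changeBranch(open, Branch.END));   // true   (#endif)
        System.out.println(changeBranch(open, Branch.END));   // false  (stray #endif)
    }
}
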
/**
|
||||
* Returns the current token from this context. When called before calling {@link #nextPPToken()}
|
||||
* a token of type {@link Lexer#tBEFORE_INPUT} will be returned.
|
||||
* @since 5.0
|
||||
*/
|
||||
public abstract Token currentLexerToken();
|
||||
public final Token currentLexerToken() {
|
||||
if (fLexer != null) {
|
||||
return fLexer.currentToken();
|
||||
}
|
||||
if (fTokens != null) {
|
||||
return fTokens;
|
||||
}
|
||||
return END_TOKEN;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next token from this context.
|
||||
*/
|
||||
public abstract Token nextPPToken() throws OffsetLimitReachedException;
|
||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
||||
if (fLexer != null) {
|
||||
return fLexer.nextToken();
|
||||
}
|
||||
if (fTokens != null) {
|
||||
fTokens= (Token) fTokens.getNext();
|
||||
}
|
||||
return currentLexerToken();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,75 +0,0 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
|
||||
/**
|
||||
* Wraps a {@link Lexer} and provides additional information for the preprocessor.
|
||||
* <p>
|
||||
* Note that for parsing the preprocessor directives the lexer is used directly, so this class
|
||||
* is not allowed to store any state about the lexing process.
|
||||
*
|
||||
* since 5.0
|
||||
*/
|
||||
public class ScannerContextFile extends ScannerContext {
|
||||
|
||||
private final Lexer fLexer;
|
||||
private final ArrayList fBranches= new ArrayList();
|
||||
|
||||
public ScannerContextFile(ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
|
||||
super(ctx, parent);
|
||||
fLexer= lexer;
|
||||
}
|
||||
|
||||
public Token currentLexerToken() {
|
||||
return fLexer.currentToken();
|
||||
}
|
||||
|
||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
||||
return fLexer.nextToken();
|
||||
}
|
||||
|
||||
public Lexer getLexerForPPDirective() {
|
||||
if (fLexer.currentTokenIsFirstOnLine() && fLexer.currentToken().getType() == IToken.tPOUND) {
|
||||
return fLexer;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean changeBranch(Integer branchKind) {
|
||||
// an if starts a new conditional construct
|
||||
if (branchKind == BRANCH_IF) {
|
||||
fBranches.add(branchKind);
|
||||
return true;
|
||||
}
|
||||
// if we are not inside of an conditional there shouldn't be an #else, #elsif or #end
|
||||
final int pos= fBranches.size()-1;
|
||||
if (pos < 0) {
|
||||
return false;
|
||||
}
|
||||
// an #end just pops one construct.
|
||||
if (branchKind == BRANCH_END) {
|
||||
fBranches.remove(pos);
|
||||
return true;
|
||||
}
|
||||
// #elsif or #else cannot appear after another #else
|
||||
if (fBranches.get(pos) == BRANCH_ELSE) {
|
||||
return false;
|
||||
}
|
||||
// overwrite #if, #elsif with #elsif or #else
|
||||
fBranches.set(pos, branchKind);
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
|
||||
|
||||
public class ScannerContextMacroExpansion extends ScannerContext {
|
||||
private static final Token END_TOKEN = new Token(Lexer.tEND_OF_INPUT, null, 0, 0);
|
||||
|
||||
private Token fTokens;
|
||||
|
||||
public ScannerContextMacroExpansion(ILocationCtx ctx, ScannerContext parent, TokenList tokens) {
|
||||
super(ctx, parent);
|
||||
fTokens= tokens.first();
|
||||
}
|
||||
|
||||
public boolean changeBranch(Integer state) {
|
||||
return false;
|
||||
}
|
||||
|
||||
public Token currentLexerToken() {
|
||||
Token t= fTokens;
|
||||
if (t == null) {
|
||||
return END_TOKEN;
|
||||
}
|
||||
return t;
|
||||
}
|
||||
|
||||
public Lexer getLexerForPPDirective() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public Token nextPPToken() {
|
||||
fTokens= (Token) fTokens.getNext();
|
||||
return currentLexerToken();
|
||||
}
|
||||
|
||||
public boolean expandsMacros() {
|
||||
return false;
|
||||
}
|
||||
}
|
|
@ -1,46 +0,0 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
|
||||
/**
|
||||
* Context used to run the preprocessor while swallowing all tokens.
|
||||
* Needed to process macro files as specified by the -imacros compiler option of gcc.
|
||||
* @since 5.0
|
||||
*/
|
||||
public class ScannerContextMacroFile extends ScannerContextFile {
|
||||
private final CPreprocessor fCpp;
|
||||
private boolean fSkippingTokens= false;
|
||||
|
||||
public ScannerContextMacroFile(CPreprocessor cpp, ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
|
||||
super(ctx, parent, lexer);
|
||||
fCpp= cpp;
|
||||
}
|
||||
|
||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
||||
if (fSkippingTokens) {
|
||||
final Token t= super.nextPPToken();
|
||||
if (t.getType() == Lexer.tEND_OF_INPUT) {
|
||||
fSkippingTokens= false;
|
||||
}
|
||||
return t;
|
||||
}
|
||||
|
||||
// use the preprocessor to read tokens off this context until this context is done.
|
||||
fSkippingTokens= true;
|
||||
Token t;
|
||||
do {
|
||||
t= fCpp.fetchTokenFromPreprocessor();
|
||||
} while (fSkippingTokens);
|
||||
return t;
|
||||
}
|
||||
}
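ScannerContextMacroFile realizes gcc's -imacros semantics with a re-entrant trick: nextPPToken() raises a skipping flag and keeps asking the owning CPreprocessor for tokens until this context reaches end-of-input, so all directives take effect but no token escapes to the parser. Stripped of the re-entrancy, the underlying idea is just the following stand-alone, hypothetical sketch (Iterator stands in for the real token stream):

import java.util.Iterator;

// Hypothetical sketch of the -imacros idea, independent of the CDT classes:
// the macro file is preprocessed so that its #define/#undef directives take
// effect, but every token it produces is discarded instead of being parsed.
class MacroFileSwallower {
    static void swallow(Iterator preprocessedTokens) {
        while (preprocessedTokens.hasNext()) {
            preprocessedTokens.next(); // side effects (macro table updates) happen, output is dropped
        }
    }
}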
|
|
@ -1,116 +0,0 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
|
||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.Keywords;
|
||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
|
||||
|
||||
/**
|
||||
* Wraps a ScannerContext and modifies its behavior by limiting the tokens
|
||||
* to the ones on the current line. Instead of the newline token an end-of-input
|
||||
* token is returned. The newline token of the underlying context is not consumed.
|
||||
* @since 5.0
|
||||
*/
|
||||
public final class ScannerContextPPDirective extends ScannerContext {
|
||||
|
||||
private static final int STATE_PREVENT_EXPANSION = 1;
|
||||
private static final int STATE_DEFINED_LPAREN = 2;
|
||||
private static final int STATE_DEFINED = 3;
|
||||
private final Lexer fLexer;
|
||||
private Token fToken;
|
||||
private boolean fConvertDefinedToken;
|
||||
private int fPreventMacroExpansion= 0;
|
||||
private int fLastEndOffset;
|
||||
|
||||
public ScannerContextPPDirective(Lexer lexer, boolean convertDefinedToken) {
|
||||
super(null, null);
|
||||
fLexer= lexer;
|
||||
fConvertDefinedToken= convertDefinedToken;
|
||||
|
||||
final Token currentToken = lexer.currentToken();
|
||||
fLastEndOffset= currentToken.getOffset();
|
||||
fToken= convertToken(currentToken);
|
||||
}
|
||||
|
||||
public Token currentLexerToken() {
|
||||
return fToken;
|
||||
}
|
||||
|
||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
||||
if (fToken.getType() == Lexer.tEND_OF_INPUT) {
|
||||
return fToken;
|
||||
}
|
||||
Token t1= convertToken(fLexer.nextToken());
|
||||
fToken= t1;
|
||||
return t1;
|
||||
}
|
||||
|
||||
public Lexer getLexerForPPDirective() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean changeBranch(Integer state) {
|
||||
return false;
|
||||
}
|
||||
|
||||
private Token convertToken(Token t) {
|
||||
switch (t.getType()) {
|
||||
case Lexer.tNEWLINE:
|
||||
t= new Token(Lexer.tEND_OF_INPUT, null, t.getEndOffset(), t.getEndOffset());
|
||||
break;
|
||||
case IToken.tIDENTIFIER:
|
||||
if (fConvertDefinedToken && CharArrayUtils.equals(Keywords.cDEFINED, t.getCharImage())) {
|
||||
t.setType(CPreprocessor.tDEFINED);
|
||||
fPreventMacroExpansion= STATE_DEFINED;
|
||||
}
|
||||
else {
|
||||
switch(fPreventMacroExpansion) {
|
||||
case STATE_DEFINED:
|
||||
case STATE_DEFINED_LPAREN:
|
||||
fPreventMacroExpansion= STATE_PREVENT_EXPANSION;
|
||||
break;
|
||||
default:
|
||||
fPreventMacroExpansion= 0;
|
||||
}
|
||||
}
|
||||
fLastEndOffset= t.getEndOffset();
|
||||
break;
|
||||
case IToken.tLPAREN:
|
||||
if (fPreventMacroExpansion == STATE_DEFINED) {
|
||||
fPreventMacroExpansion= STATE_DEFINED_LPAREN; // suppress macro-expansion for 'defined (id)'
|
||||
}
|
||||
else {
|
||||
fPreventMacroExpansion= 0;
|
||||
}
|
||||
fLastEndOffset= t.getEndOffset();
|
||||
break;
|
||||
default:
|
||||
fPreventMacroExpansion= 0;
|
||||
fLastEndOffset= t.getEndOffset();
|
||||
break;
|
||||
}
|
||||
return t;
|
||||
}
|
||||
|
||||
public boolean expandsMacros() {
|
||||
return fPreventMacroExpansion == 0;
|
||||
}
|
||||
|
||||
public int getLastEndOffset() {
|
||||
return fLastEndOffset;
|
||||
}
|
||||
}
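The STATE_* constants above encode why expansion must be withheld: after the defined keyword, and optionally a following '(', the next identifier is the operand of defined and has to be tested as-is rather than macro-expanded. A stand-alone model of that state machine, using plain strings for token kinds purely for illustration:

// Hypothetical model of the expansion-suppression states used by convertToken() above.
class DefinedTracker {
    private static final int NONE= 0, DEFINED= 1, DEFINED_LPAREN= 2, PREVENT= 3;
    private int fState= NONE;

    /** Call once per token of the directive; kind is "defined", "(", "id" or anything else. */
    public void nextToken(String kind) {
        if (kind.equals("defined")) {
            fState= DEFINED;
        } else if (kind.equals("(") && fState == DEFINED) {
            fState= DEFINED_LPAREN;                 // allow the form: defined ( id )
        } else if (kind.equals("id") && (fState == DEFINED || fState == DEFINED_LPAREN)) {
            fState= PREVENT;                        // this identifier is the operand of 'defined'
        } else {
            fState= NONE;
        }
    }

    /** Whether the token last passed to nextToken() may be macro-expanded. */
    public boolean expandsMacros() {
        return fState == NONE;
    }
}

Feeding it the sequence defined, (, id, ) reports expandsMacros() == false exactly while the operand is current, whereas the same identifier elsewhere on the directive line remains expandable.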
|
|
@ -4293,4 +4293,6 @@ abstract class BaseScanner implements IScanner {
|
|||
|
||||
protected abstract IToken newToken(int signal, char[] buffer);
|
||||
|
||||
public void setComputeImageLocations(boolean val) {
|
||||
}
|
||||
}
|
||||
|
|
|
@ -104,19 +104,20 @@ public class FunctionStyleMacro extends ObjectStyleMacro {
|
|||
return sig;
|
||||
|
||||
int len = name.length + 2 /*()*/;
|
||||
for( int i = 0; i < arglist.length && arglist[i] != null; i++ ){
|
||||
if( i + 1 < arglist.length && arglist[i+1] != null)
|
||||
final char[][] params = getOriginalParameters();
|
||||
for( int i = 0; i < params.length && params[i] != null; i++ ){
|
||||
if( i + 1 < params.length && params[i+1] != null)
|
||||
len += 1; /*,*/
|
||||
len += arglist[i].length;
|
||||
len += params[i].length;
|
||||
}
|
||||
sig = new char[len];
|
||||
System.arraycopy( name, 0, sig, 0, name.length );
|
||||
sig[name.length] = '(';
|
||||
int idx = name.length + 1;
|
||||
for( int i = 0; i < arglist.length && arglist[i] != null; i++ ){
|
||||
System.arraycopy( arglist[i], 0, sig, idx, arglist[i].length );
|
||||
idx += arglist[i].length;
|
||||
if( i + 1 < arglist.length && arglist[i+1] != null )
|
||||
for( int i = 0; i < params.length && params[i] != null; i++ ){
|
||||
System.arraycopy( params[i], 0, sig, idx, params[i].length );
|
||||
idx += params[i].length;
|
||||
if( i + 1 < params.length && params[i+1] != null )
|
||||
sig[idx++] = ',';
|
||||
}
|
||||
sig[idx] = ')';
|
||||
|
|
|
@ -771,6 +771,10 @@ public class LocationMap implements ILocationResolver, IScannerPreprocessorLog {
|
|||
return r_unclear;
|
||||
}
|
||||
|
||||
public IASTFileLocation getExpansionLocation() {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static interface _IPreprocessorDirective {
|
||||
|
@ -1022,6 +1026,10 @@ public class LocationMap implements ILocationResolver, IScannerPreprocessorLog {
|
|||
this.expansion = exp;
|
||||
}
|
||||
|
||||
public IASTFileLocation getExpansionLocation() {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public abstract static class Location implements IASTNodeLocation {
|
||||
|
|
|
@ -18,6 +18,7 @@ import org.eclipse.cdt.core.dom.ast.ASTVisitor;
|
|||
import org.eclipse.cdt.core.dom.ast.DOMException;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTCompletionContext;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
|
@ -149,6 +150,10 @@ public class PDOMASTAdapter {
|
|||
public char[] toCharArray() {
|
||||
return fDelegate.toCharArray();
|
||||
}
|
||||
|
||||
public IASTImageLocation getImageLocation() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static class AnonymousEnumeration implements IEnumeration {
|
||||
|
|
|
@ -199,7 +199,7 @@ public abstract class PDOMIndexerTask extends PDOMWriter implements IPDOMIndexer
|
|||
private void internalParseTUs(IWritableIndex index, int readlockCount, Collection sources, Collection headers, IProgressMonitor monitor) throws CoreException, InterruptedException {
|
||||
TodoTaskUpdater taskUpdater = new TodoTaskUpdater();
|
||||
|
||||
int options= AbstractLanguage.OPTION_ADD_COMMENTS;
|
||||
int options= AbstractLanguage.OPTION_ADD_COMMENTS | AbstractLanguage.OPTION_NO_IMAGE_LOCATIONS;
|
||||
if (checkProperty(IndexerPreferences.KEY_SKIP_ALL_REFERENCES)) {
|
||||
options |= AbstractLanguage.OPTION_SKIP_FUNCTION_BODIES;
|
||||
}
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.eclipse.cdt.core.dom.parser.cpp.GPPParserExtensionConfiguration;
|
|||
import org.eclipse.cdt.core.dom.parser.cpp.GPPScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.dom.parser.cpp.ICPPParserExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.parser.CodeReader;
|
||||
import org.eclipse.cdt.core.parser.IParserLogService;
|
||||
import org.eclipse.cdt.core.parser.IScanner;
|
||||
import org.eclipse.cdt.core.parser.IScannerInfo;
|
||||
import org.eclipse.cdt.core.parser.IScannerInfoProvider;
|
||||
|
@ -37,6 +38,7 @@ import org.eclipse.cdt.core.parser.ParserUtil;
|
|||
import org.eclipse.cdt.core.parser.ScannerInfo;
|
||||
import org.eclipse.cdt.internal.core.dom.parser.c.GNUCSourceParser;
|
||||
import org.eclipse.cdt.internal.core.dom.parser.cpp.GNUCPPSourceParser;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner2.DOMScanner;
|
||||
import org.eclipse.core.resources.IFile;
|
||||
import org.eclipse.core.resources.IProject;
|
||||
|
@ -123,9 +125,8 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
|
|||
scannerExtensionConfiguration = CPP_GNU_SCANNER_EXTENSION;
|
||||
else
|
||||
scannerExtensionConfiguration = C_GNU_SCANNER_EXTENSION;
|
||||
|
||||
scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE,
|
||||
l, ParserFactory.createDefaultLogService(), scannerExtensionConfiguration, fileCreator);
|
||||
scanner= createScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, l, ParserFactory.createDefaultLogService(),
|
||||
scannerExtensionConfiguration, fileCreator);
|
||||
scanner.setScanComments(parseComment);
|
||||
//assume GCC
|
||||
if( l == ParserLanguage.C )
|
||||
|
@ -137,13 +138,11 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
|
|||
{
|
||||
String dialect = configuration.getParserDialect();
|
||||
if( dialect.equals( dialects[0]) || dialect.equals( dialects[2]))
|
||||
scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE,
|
||||
ParserLanguage.C,
|
||||
ParserUtil.getScannerLogService(), C_GNU_SCANNER_EXTENSION, fileCreator);
|
||||
scanner= createScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.C,
|
||||
ParserUtil.getScannerLogService(), C_GNU_SCANNER_EXTENSION, fileCreator);
|
||||
else if( dialect.equals( dialects[1] ) || dialect.equals( dialects[3] ))
|
||||
scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE,
|
||||
ParserLanguage.CPP,
|
||||
ParserUtil.getScannerLogService(), CPP_GNU_SCANNER_EXTENSION, fileCreator);
|
||||
scanner = createScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.CPP,
|
||||
ParserUtil.getScannerLogService(), CPP_GNU_SCANNER_EXTENSION, fileCreator);
|
||||
else
|
||||
throw new UnsupportedDialectException();
|
||||
|
||||
|
@ -208,9 +207,8 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
|
|||
else
|
||||
scannerExtensionConfiguration = C_GNU_SCANNER_EXTENSION;
|
||||
|
||||
IScanner scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETION_PARSE,
|
||||
l, ParserFactory.createDefaultLogService(),
|
||||
scannerExtensionConfiguration, fileCreator);
|
||||
IScanner scanner= createScanner(reader, scanInfo, ParserMode.COMPLETION_PARSE, l,
|
||||
ParserFactory.createDefaultLogService(), scannerExtensionConfiguration, fileCreator);
|
||||
scanner.setContentAssistMode(offset);
|
||||
|
||||
// assume GCC
|
||||
|
@ -229,6 +227,15 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
|
|||
IASTCompletionNode node = parser.getCompletionNode();
|
||||
return node;
|
||||
}
|
||||
|
||||
private IScanner createScanner(CodeReader reader, IScannerInfo scanInfo,
|
||||
ParserMode mode, ParserLanguage lang, IParserLogService log,
|
||||
IScannerExtensionConfiguration scanConfig, ICodeReaderFactory fileCreator) {
|
||||
if (CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"))) { //$NON-NLS-1$
|
||||
return new CPreprocessor(reader, scanInfo, lang, log, scanConfig, fileCreator);
|
||||
}
|
||||
return new DOMScanner(reader, scanInfo, mode, lang, log, scanConfig, fileCreator);
|
||||
}
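createScanner() is the switch point between the two implementations: the new CPreprocessor is used only when the "scanner" system property equals CPreprocessor.PROP_VALUE, whose literal value is not visible in this hunk. A hedged example of opting in from test fixture code, restoring the property afterwards:

// Hypothetical test fixture code; CPreprocessor.PROP_VALUE is referenced symbolically
// because its literal value does not appear in this change.
private void runWithNewScanner(Runnable parseStep) {
    String old= System.getProperty("scanner");
    System.setProperty("scanner", org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor.PROP_VALUE);
    try {
        parseStep.run(); // create the translation unit / run the parse here
    } finally {
        if (old == null)
            System.getProperties().remove("scanner");
        else
            System.setProperty("scanner", old);
    }
}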
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
|
|
@ -119,7 +119,8 @@ public class CallHierarchyBaseTest extends BaseUITestCase {
|
|||
for (int i=0; i<200; i++) {
|
||||
item= root.getItem(i1);
|
||||
try {
|
||||
if (!"...".equals(item.getText())) {
|
||||
String text= item.getText();
|
||||
if (!"...".equals(text) && !"".equals(text)) {
|
||||
break;
|
||||
}
|
||||
} catch (SWTException e) {
|
||||
|
|
|
@ -33,11 +33,32 @@ public class CompletionTest_MacroRef_NoPrefix extends CompletionProposalsBaseTe
|
|||
"__DATE__",
|
||||
"__FILE__",
|
||||
"__LINE__",
|
||||
"__TIME__"
|
||||
"__STDC__",
|
||||
"__TIME__",
|
||||
"__asm__",
|
||||
"__builtin_constant_p(exp)",
|
||||
"__builtin_va_arg(ap, type)",
|
||||
"__complex__",
|
||||
"__const",
|
||||
"__const__",
|
||||
"__cplusplus",
|
||||
"__extension__",
|
||||
"__imag__",
|
||||
"__inline__",
|
||||
"__null",
|
||||
"__real__",
|
||||
"__restrict",
|
||||
"__restrict__",
|
||||
"__signed__",
|
||||
"__stdcall",
|
||||
"__volatile__"
|
||||
};
|
||||
|
||||
public CompletionTest_MacroRef_NoPrefix(String name) {
|
||||
super(name);
|
||||
setExpectFailure(0); // no bug number; the test fails because I added additional macros that
|
||||
// are reported by the CPreprocessor, but not by Scanner2. As soon as we switch over to the
|
||||
// CPreprocessor, the test case will pass again.
|
||||
}
|
||||
|
||||
public static Test suite() {
|
||||
|
|
|
@ -93,11 +93,21 @@ public class CElementHyperlinkDetector implements IHyperlinkDetector {
|
|||
IASTName[] selectedNames=
|
||||
lang.getSelectedNames(ast, selection.getOffset(), selection.getLength());
|
||||
|
||||
IRegion linkRegion;
|
||||
IRegion linkRegion= null;
|
||||
if(selectedNames.length > 0 && selectedNames[0] != null) { // found a name
|
||||
linkRegion = new Region(selection.getOffset(), selection.getLength());
|
||||
// prefer include statement over the include name
|
||||
if (selectedNames[0].getParent() instanceof IASTPreprocessorIncludeStatement) {
|
||||
IASTFileLocation loc= selectedNames[0].getParent().getFileLocation();
|
||||
if (loc != null) {
|
||||
linkRegion= new Region(loc.getNodeOffset(), loc.getNodeLength());
|
||||
}
|
||||
}
|
||||
if (linkRegion == null) {
|
||||
linkRegion = new Region(selection.getOffset(), selection.getLength());
|
||||
}
|
||||
}
|
||||
else { // check if we are in an include statement
|
||||
// mstodo- support for old scanner
|
||||
linkRegion = matchIncludeStatement(ast, selection);
|
||||
}
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.eclipse.jface.text.ITextInputListener;
|
|||
import org.eclipse.jface.text.TypedPosition;
|
||||
import org.eclipse.swt.widgets.Display;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorElifStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorElseStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorEndifStatement;
|
||||
|
@ -232,21 +232,17 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
|
|||
|
||||
for (int i = 0; i < preprocStmts.length; i++) {
|
||||
IASTPreprocessorStatement statement = preprocStmts[i];
|
||||
if (!fileName.equals(statement.getContainingFilename())) {
|
||||
IASTFileLocation floc= statement.getFileLocation();
|
||||
if (floc == null || !fileName.equals(floc.getFileName())) {
|
||||
// preprocessor directive is from a different file
|
||||
continue;
|
||||
}
|
||||
IASTNodeLocation[] nodeLocations = statement.getNodeLocations();
|
||||
if (nodeLocations.length != 1) {
|
||||
continue;
|
||||
}
|
||||
IASTNodeLocation stmtLocation= nodeLocations[0];
|
||||
if (statement instanceof IASTPreprocessorIfStatement) {
|
||||
IASTPreprocessorIfStatement ifStmt = (IASTPreprocessorIfStatement)statement;
|
||||
inactiveCodeStack.push(Boolean.valueOf(inInactiveCode));
|
||||
if (!ifStmt.taken()) {
|
||||
if (!inInactiveCode) {
|
||||
inactiveCodeStart = stmtLocation.getNodeOffset();
|
||||
inactiveCodeStart = floc.getNodeOffset();
|
||||
inInactiveCode = true;
|
||||
}
|
||||
}
|
||||
|
@ -255,7 +251,7 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
|
|||
inactiveCodeStack.push(Boolean.valueOf(inInactiveCode));
|
||||
if (!ifdefStmt.taken()) {
|
||||
if (!inInactiveCode) {
|
||||
inactiveCodeStart = stmtLocation.getNodeOffset();
|
||||
inactiveCodeStart = floc.getNodeOffset();
|
||||
inInactiveCode = true;
|
||||
}
|
||||
}
|
||||
|
@ -264,27 +260,27 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
|
|||
inactiveCodeStack.push(Boolean.valueOf(inInactiveCode));
|
||||
if (!ifndefStmt.taken()) {
|
||||
if (!inInactiveCode) {
|
||||
inactiveCodeStart = stmtLocation.getNodeOffset();
|
||||
inactiveCodeStart = floc.getNodeOffset();
|
||||
inInactiveCode = true;
|
||||
}
|
||||
}
|
||||
} else if (statement instanceof IASTPreprocessorElseStatement) {
|
||||
IASTPreprocessorElseStatement elseStmt = (IASTPreprocessorElseStatement)statement;
|
||||
if (!elseStmt.taken() && !inInactiveCode) {
|
||||
inactiveCodeStart = stmtLocation.getNodeOffset();
|
||||
inactiveCodeStart = floc.getNodeOffset();
|
||||
inInactiveCode = true;
|
||||
} else if (elseStmt.taken() && inInactiveCode) {
|
||||
int inactiveCodeEnd = stmtLocation.getNodeOffset();
|
||||
int inactiveCodeEnd = floc.getNodeOffset();
|
||||
positions.add(createHighlightPosition(inactiveCodeStart, inactiveCodeEnd, false, fHighlightKey));
|
||||
inInactiveCode = false;
|
||||
}
|
||||
} else if (statement instanceof IASTPreprocessorElifStatement) {
|
||||
IASTPreprocessorElifStatement elifStmt = (IASTPreprocessorElifStatement)statement;
|
||||
if (!elifStmt.taken() && !inInactiveCode) {
|
||||
inactiveCodeStart = stmtLocation.getNodeOffset();
|
||||
inactiveCodeStart = floc.getNodeOffset();
|
||||
inInactiveCode = true;
|
||||
} else if (elifStmt.taken() && inInactiveCode) {
|
||||
int inactiveCodeEnd = stmtLocation.getNodeOffset();
|
||||
int inactiveCodeEnd = floc.getNodeOffset();
|
||||
positions.add(createHighlightPosition(inactiveCodeStart, inactiveCodeEnd, false, fHighlightKey));
|
||||
inInactiveCode = false;
|
||||
}
|
||||
|
@ -292,7 +288,7 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
|
|||
try {
|
||||
boolean wasInInactiveCode = ((Boolean)inactiveCodeStack.pop()).booleanValue();
|
||||
if (inInactiveCode && !wasInInactiveCode) {
|
||||
int inactiveCodeEnd = stmtLocation.getNodeOffset() + stmtLocation.getNodeLength();
|
||||
int inactiveCodeEnd = floc.getNodeOffset() + floc.getNodeLength();
|
||||
positions.add(createHighlightPosition(inactiveCodeStart, inactiveCodeEnd, true, fHighlightKey));
|
||||
}
|
||||
inInactiveCode = wasInInactiveCode;
|
||||
|
|
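Taken together, the loop computes inactive source ranges with a stack of booleans: #if/#ifdef/#ifndef push the current state and may open a range, #else/#elif may close one range and open another, and #endif closes whatever this level opened and restores the outer state. A stand-alone model of that bookkeeping on plain offsets (the real code takes the offsets from each statement's IASTFileLocation, as the change above introduces):

import java.util.ArrayList;
import java.util.Stack;

// Hypothetical stand-alone model of the inactive-code range computation.
class InactiveRanges {
    private final Stack fStack= new Stack();
    private final ArrayList fRanges= new ArrayList(); // elements are int[]{startOffset, endOffset}
    private boolean fInInactive;
    private int fStart;

    public void onIf(boolean taken, int offset) {        // #if, #ifdef, #ifndef
        fStack.push(Boolean.valueOf(fInInactive));
        if (!taken && !fInInactive) {
            fStart= offset;
            fInInactive= true;
        }
    }

    public void onElse(boolean taken, int offset) {       // #else; #elif behaves alike
        if (!taken && !fInInactive) {
            fStart= offset;
            fInInactive= true;
        } else if (taken && fInInactive) {
            fRanges.add(new int[] { fStart, offset });
            fInInactive= false;
        }
    }

    public void onEndif(int endOffset) {                   // #endif
        boolean wasInactive= ((Boolean) fStack.pop()).booleanValue();
        if (fInInactive && !wasInactive) {
            fRanges.add(new int[] { fStart, endOffset });
        }
        fInInactive= wasInactive;
    }
}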