https://github.com/eclipse-cdt/cdt
CPreprocessor can replace DOMScanner.
commit fffaae4443 (parent 67bcec2f0f)
48 changed files with 977 additions and 737 deletions
@@ -1,5 +1,5 @@
 /*******************************************************************************
-* Copyright (c) 2005, 2006 IBM Corporation and others.
+* Copyright (c) 2005, 2007 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
@@ -7,6 +7,7 @@
 *
 * Contributors:
 * IBM Corporation - initial API and implementation
+* Markus Schorn (Wind River Systems)
 *******************************************************************************/
 /*
 * Created on Jun 4, 2003
@@ -14,12 +15,15 @@
 */
 package org.eclipse.cdt.core.model.tests;

+import java.util.LinkedHashMap;
+
 import junit.framework.Test;
 import junit.framework.TestSuite;

 import org.eclipse.cdt.core.model.CModelException;
 import org.eclipse.cdt.core.model.IInclude;
 import org.eclipse.cdt.core.model.ITranslationUnit;
+import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;

 /**
 * @author bnicolle
@@ -27,11 +31,14 @@ import org.eclipse.cdt.core.model.ITranslationUnit;
 */
 public class IIncludeTests extends IntegratedCModelTest {

+private boolean fUseCPreprocessor;
+
 /**
 * @param string
 */
 public IIncludeTests(String string) {
 super( string );
+fUseCPreprocessor= CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"));
 }

 /**
@@ -69,57 +76,39 @@ public class IIncludeTests extends IntegratedCModelTest {
 assertNotNull("CModelException thrown",c);
 }

-String getIncludeNameList[] = new String[] {
+LinkedHashMap expectIncludes= new LinkedHashMap();
-new String("stdio.h"),
+expectIncludes.put("stdio.h", Boolean.TRUE);
-new String("whatever.h"),
+expectIncludes.put("whatever.h", Boolean.FALSE);
-new String("src/slash.h"),
+expectIncludes.put("src/slash.h", Boolean.TRUE);
-new String("src\\backslash.h"), // that's a single backslash, escaped
+expectIncludes.put("src\\backslash.h", Boolean.TRUE); // that's a single backslash, escaped
-new String("Program Files/space.h"),
+expectIncludes.put("Program Files/space.h", Boolean.FALSE);
-new String("../up1dir.h"),
+expectIncludes.put("../up1dir.h", Boolean.FALSE);
-new String("./samedir.h"),
+expectIncludes.put("./samedir.h", Boolean.FALSE);
-new String("different_extension1.hpp"),
+expectIncludes.put("different_extension1.hpp", Boolean.FALSE);
-new String("different_extension2.hh"),
+expectIncludes.put("different_extension2.hh", Boolean.FALSE);
-new String("different_extension3.x"),
+expectIncludes.put("different_extension3.x", Boolean.FALSE);
-new String("no_extension"),
+expectIncludes.put("no_extension", Boolean.TRUE);
-new String("whitespace_after_hash"),
+expectIncludes.put("whitespace_after_hash", Boolean.FALSE);
-new String("whitespace_before_hash"),
+expectIncludes.put("whitespace_before_hash", Boolean.FALSE);
-new String("resync_after_bad_parse_1"),
+expectIncludes.put("resync_after_bad_parse_1", Boolean.FALSE);
-new String("resync_after_bad_parse_2"),
+expectIncludes.put("resync_after_bad_parse_2", Boolean.FALSE);
-new String("one"), // C-spec does not allow this, but that's OK for our present purposes
+expectIncludes.put("one", Boolean.FALSE); // C-spec does not allow this, gcc warns and includes, so we should include it, also.
-new String("resync_after_bad_parse_3"),
+expectIncludes.put("resync_after_bad_parse_3", Boolean.FALSE);
-new String("invalid.h"), // C-spec does not allow this, but that's OK for our present purposes
-new String("myInclude1.h"),
-new String("vers2.h")
-};
+if (!fUseCPreprocessor) {
+expectIncludes.put("invalid.h", Boolean.FALSE); // C-spec does not allow this, but that's OK for our present purposes
+}
+expectIncludes.put("myInclude1.h", Boolean.FALSE);
+expectIncludes.put("vers2.h", Boolean.FALSE);

+String[] getIncludeNameList= (String[]) expectIncludes.keySet().toArray(new String[expectIncludes.size()]);
 assertEquals( getIncludeNameList.length, theIncludes.length );
 for( int i=0; i<getIncludeNameList.length; i++ )
 {
 IInclude inc1 = theIncludes[i];
-assertEquals( getIncludeNameList[i], inc1.getIncludeName() );
+String expectName= getIncludeNameList[i];
+assertEquals( expectName, inc1.getIncludeName() );
+assertEquals( ((Boolean) expectIncludes.get(expectName)).booleanValue(), inc1.isStandard());
 }

-}
-
-public void testIsStandard() throws CModelException
-{
-ITranslationUnit tu = getTU();
-IInclude[] theIncludes = null;
-try {
-theIncludes = tu.getIncludes();
-}
-catch( CModelException c )
-{
-assertNotNull("CModelException thrown",c);
-}
-boolean isStandardList[] = new boolean[] {
-true, false
-};
-for( int i=0; i<isStandardList.length; i++ )
-{
-IInclude inc1 = theIncludes[i];
-assertEquals( isStandardList[i], inc1.isStandard() );
 }
 }

-}
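For illustration, a minimal sketch of the map-driven expectation pattern used above, assuming theIncludes was obtained from ITranslationUnit.getIncludes(); names and values here are only examples:

    // Each entry pairs an expected include name with the expected isStandard() value.
    LinkedHashMap expected = new LinkedHashMap();
    expected.put("stdio.h", Boolean.TRUE);      // expected isStandard() == true
    expected.put("whatever.h", Boolean.FALSE);  // expected isStandard() == false

    String[] names = (String[]) expected.keySet().toArray(new String[expected.size()]);
    assertEquals(names.length, theIncludes.length);
    for (int i = 0; i < names.length; i++) {
        IInclude include = theIncludes[i];
        assertEquals(names[i], include.getIncludeName());
        assertEquals(((Boolean) expected.get(names[i])).booleanValue(), include.isStandard());
    }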
@@ -7,6 +7,7 @@
 *
 * Contributors:
 * IBM Rational Software - Initial API and implementation
+* Markus Schorn (Wind River Systems)
 *******************************************************************************/
 package org.eclipse.cdt.core.parser.tests.ast2;

@@ -14,6 +15,7 @@ import java.io.StringWriter;
 import java.io.Writer;

 import org.eclipse.cdt.core.dom.IName;
+import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
 import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTNode;
 import org.eclipse.cdt.core.dom.ast.IBinding;
@@ -1531,8 +1533,7 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
 decls = getDeclarationOffTU((IASTName)node);
 assertEquals(decls.length, 1);
 assertEquals( decls[0].toString(), "test" ); //$NON-NLS-1$
-assertEquals( ((ASTNode)decls[0]).getOffset(), 132);
-assertEquals( ((ASTNode)decls[0]).getLength(), 4);
+assertLocation(code, "test:", 4, decls[0]);
 }

 public void testBugMethodDef() throws Exception {
@@ -1694,7 +1695,7 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
 buffer.append("char c; // selection on this fails because offset for \n"); //$NON-NLS-1$
 buffer.append("_END_STD_C\n"); //$NON-NLS-1$
 buffer.append("char foo() {\n"); //$NON-NLS-1$
-buffer.append("return c; \n"); //$NON-NLS-1$
+buffer.append("return c; // ref \n"); //$NON-NLS-1$
 buffer.append("}\n"); //$NON-NLS-1$

 String code = buffer.toString();
@@ -1707,8 +1708,7 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
 IName[] decls = getDeclarationOffTU((IASTName)node);
 assertEquals(decls.length, 1);
 assertEquals( decls[0].toString(), "c" ); //$NON-NLS-1$
-assertEquals( ((ASTNode)decls[0]).getOffset(), 86);
-assertEquals( ((ASTNode)decls[0]).getLength(), 1);
+assertLocation(code, "c;", 1, decls[0]);

 index = code.indexOf("char c"); //$NON-NLS-1$
 node = parse( code, index + 5, index + 6, true );
@@ -1718,8 +1718,14 @@ public class DOMSelectionParseTest extends DOMSelectionParseBaseTest {
 IName[] refs = getReferencesOffTU((IASTName)node);
 assertEquals(refs.length, 1);
 assertEquals( refs[0].toString(), "c" ); //$NON-NLS-1$
-assertEquals( ((ASTNode)refs[0]).getOffset(), 168);
-assertEquals( ((ASTNode)decls[0]).getLength(), 1);
+assertLocation(code, "c; // ref", 1, refs[0]);
 }

+private void assertLocation(String code, String occur, int length, IName name) {
+int offset= code.indexOf(occur);
+final IASTFileLocation loc= name.getFileLocation();
+assertEquals(offset, loc.getNodeOffset());
+assertEquals(length, loc.getNodeLength());
+}
+
 public void testBug92632() throws Exception
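The new assertLocation helper replaces hard-coded node offsets (132, 86, 168 above) with offsets computed from the test source itself. The same idea in isolation, with comments; a sketch that assumes a JUnit test which already holds the source in a String:

    // Locate the expected name by searching the source string instead of
    // hard-coding an offset that breaks whenever the test code is edited.
    private void assertLocation(String code, String occurrence, int expectedLength, IName name) {
        int expectedOffset = code.indexOf(occurrence);   // first occurrence marks the name
        IASTFileLocation loc = name.getFileLocation();   // location reported by the parser
        assertEquals(expectedOffset, loc.getNodeOffset());
        assertEquals(expectedLength, loc.getNodeLength());
    }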
@@ -105,7 +105,7 @@ public class LexerTests extends BaseTestCase {

 private void eof() throws Exception {
 IToken t= fLexer.nextToken();
-assertEquals("superfluous token " + t, Lexer.tEND_OF_INPUT, t.getType());
+assertEquals("superfluous token " + t, IToken.tEND_OF_INPUT, t.getType());
 assertEquals(0, fLog.getProblemCount());
 assertEquals(0, fLog.getCommentCount());
 }
@@ -139,12 +139,18 @@ public class LocationMapTests extends BaseTestCase {
 }

 private void checkLocation(IASTFileLocation loc, String filename, int offset, int length, int line, int endline) {
+if (loc == null) {
+assertEquals(0, offset);
+assertEquals(0, length);
+}
+else {
 assertEquals(filename, loc.getFileName());
 assertEquals(offset, loc.getNodeOffset());
 assertEquals(length, loc.getNodeLength());
 assertEquals(line, loc.getStartingLineNumber());
 assertEquals(endline, loc.getEndingLineNumber());
 }
+}

 private void checkComment(IASTComment comment, String content, boolean blockComment,
 String filename, int offset, int length, int line, int endline) {
@@ -319,7 +325,7 @@ public class LocationMapTests extends BaseTestCase {
 IASTPreprocessorIncludeStatement[] includes= fLocationMap.getIncludeDirectives();
 assertEquals(2, includes.length);
 checkInclude(includes[0], "", "", "n1", "", true, false, FN, 0, 0, 1, 0, 0);
-checkInclude(includes[1], "012", "12", "n2", "f2", false, true, FN, 0, 3, 1, 1, 2);
+checkInclude(includes[1], new String(DIGITS), "12", "n2", "f2", false, true, FN, 0, 16, 1, 1, 2);
 }

 public void testIf() {
@@ -434,7 +440,6 @@
 }

 public void testMacroExpansion() {
-ImageLocationInfo ili= new ImageLocationInfo();
 IMacroBinding macro1= new TestMacro("n1", "exp1", null);
 IMacroBinding macro2= new TestMacro("n2", "exp2", null);
 IMacroBinding macro3= new TestMacro("n3", "exp3", null);
@@ -444,8 +449,8 @@
 fLocationMap.registerPredefinedMacro(macro1);
 fLocationMap.registerMacroFromIndex(macro2, "ifile", 2, 12, 32);
 fLocationMap.encounterPoundDefine(3, 13, 33, 63, 103, macro3);
-IASTName name1= fLocationMap.encounterImplicitMacroExpansion(macro1, ili);
+IASTName name1= fLocationMap.encounterImplicitMacroExpansion(macro1, null);
-IASTName name2= fLocationMap.encounterImplicitMacroExpansion(macro2, ili);
+IASTName name2= fLocationMap.encounterImplicitMacroExpansion(macro2, null);
 fLocationMap.pushMacroExpansion(110, 115, 125, 30, macro3, new IASTName[]{name1, name2}, new ImageLocationInfo[0]);
 fLocationMap.encounteredComment(12, 23, false);
 checkComment(fLocationMap.getComments()[0], new String(LONGDIGITS, 110, 15), false, FN, 110, 15, 2, 2);
@@ -551,7 +556,7 @@

 inclusions= inclusions[0].getNestedInclusions();
 assertEquals(1, inclusions.length);
-checkInclude(inclusions[0].getIncludeDirective(), "b4", "4", "pre11", "pre11", false, true, "pre1", 6, 2, 1, 7, 1);
+checkInclude(inclusions[0].getIncludeDirective(), "b4b", "4", "pre11", "pre11", false, true, "pre1", 6, 3, 1, 7, 1);
 assertEquals(0, inclusions[0].getNestedInclusions().length);
 }
 }
@@ -13,6 +13,7 @@
 package org.eclipse.cdt.core.model;

 import org.eclipse.cdt.core.dom.ICodeReaderFactory;
+import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
 import org.eclipse.cdt.core.index.IIndex;
 import org.eclipse.cdt.core.parser.CodeReader;
@@ -38,6 +39,13 @@ public abstract class AbstractLanguage extends PlatformObject implements ILangua
 */
 public final static int OPTION_ADD_COMMENTS= 2;

+/**
+* Option for {@link #getASTTranslationUnit(CodeReader, IScannerInfo, ICodeReaderFactory, IIndex, int, IParserLogService)}
+* Performance optimization, instructs the parser not to create image-locations.
+* When using this option {@link IASTName#getImageLocation()} will always return <code>null</code>.
+*/
+public final static int OPTION_NO_IMAGE_LOCATIONS= 4;
+
 /**
 * @deprecated, throws an UnsupportedOperationException
 */
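The new constant is a bit flag that gets OR-ed into the options argument of getASTTranslationUnit. A hedged usage sketch; language, reader, scannerInfo, codeReaderFactory, index and log are placeholders for whatever the caller already has, and checked exceptions are omitted:

    // Request an AST with comments but without image locations (a speed optimization).
    int options = AbstractLanguage.OPTION_ADD_COMMENTS | AbstractLanguage.OPTION_NO_IMAGE_LOCATIONS;
    IASTTranslationUnit ast = language.getASTTranslationUnit(
            reader, scannerInfo, codeReaderFactory, index, options, log);
    // With OPTION_NO_IMAGE_LOCATIONS set, IASTName.getImageLocation() always returns null.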
@@ -215,7 +215,7 @@ public class AsmModelBuilder implements IContributedModelBuilder {
 Token token;
 try {
 token= fLexer.nextToken();
-if (token.getType() == Lexer.tEND_OF_INPUT) {
+if (token.getType() == IToken.tEND_OF_INPUT) {
 token = null;
 }
 } catch (OffsetLimitReachedException exc) {
@@ -727,7 +727,7 @@ public class ASTSignatureUtil {
 StringBuffer result = new StringBuffer();
 result.append(expression.toString());
 if (expression.getKind() == IASTLiteralExpression.lk_string_literal) {
-// mstodo- support for old scanner
+// mstodo- old scanner, remove
 if (result.length() == 0 || result.charAt(0) != '"') {
 result.insert(0, '"');
 result.append('"');
@@ -0,0 +1,45 @@
+/*******************************************************************************
+* Copyright (c) 2007 Wind River Systems, Inc. and others.
+* All rights reserved. This program and the accompanying materials
+* are made available under the terms of the Eclipse Public License v1.0
+* which accompanies this distribution, and is available at
+* http://www.eclipse.org/legal/epl-v10.html
+*
+* Contributors:
+* Markus Schorn - initial API and implementation
+*******************************************************************************/
+package org.eclipse.cdt.core.dom.ast;
+
+
+/**
+* <p>
+* <strong>EXPERIMENTAL</strong>. This class or interface has been added as
+* part of a work in progress. There is no guarantee that this API will
+* work or that it will remain the same. Please do not use this API without
+* consulting with the CDT team.
+* </p>
+* @since 5.0
+*
+* An image location explains how a name made it into the translation unit.
+*/
+public interface IASTImageLocation extends IASTFileLocation {
+
+/**
+* The image is part of the code that has not been modified by the preprocessor.
+*/
+final int REGULAR_CODE= 1;
+/**
+* The image is part of a macro definition and was introduced by some macro expansion.
+*/
+final int MACRO_DEFINITION= 2;
+/**
+* The image is part of an argument of an explicit macro expansion.
+*/
+final int ARGUMENT_TO_MACRO_EXPANSION= 3;
+
+/**
+* Returns the kind of image-location, one of {@link #REGULAR_CODE}, {@link #MACRO_DEFINITION} or
+* {@link #ARGUMENT_TO_MACRO_EXPANSION}.
+*/
+public int getLocationKind();
+}
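A hedged sketch of how client code could consume the new interface for a given IASTName; the name variable is a placeholder:

    IASTImageLocation image = name.getImageLocation();
    if (image != null) {
        switch (image.getLocationKind()) {
        case IASTImageLocation.REGULAR_CODE:
            // the name is spelled directly in the source file
            break;
        case IASTImageLocation.MACRO_DEFINITION:
            // the name stems from the body of a macro definition
            break;
        case IASTImageLocation.ARGUMENT_TO_MACRO_EXPANSION:
            // the name stems from an argument of an explicit macro expansion
            break;
        }
        // offsets and lengths are inherited from IASTFileLocation
        System.out.println("image at " + image.getNodeOffset() + ", length " + image.getNodeLength());
    }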
@@ -62,4 +62,18 @@ public interface IASTName extends IASTNode, IName {
 * Determines the current linkage in which the name has to be resolved.
 */
 public ILinkage getLinkage();
+
+/**
+* Returns the image location for this name or <code>null</code> if the information is not available.
+* <p>
+* An image location can be computed when the name is either found directly in the code, is (part of)
+* an argument to a macro expansion or is (part of) a macro definition found in the source code.
+* <p>
+* The image location is <code>null</code>, when the name consists of multiple tokens (qualified names)
+* and the tokens are not found side by side in the code or if
+* the name is the result of a token-paste operation or the name is found in the definition of a
+* built-in macro.
+* @since 5.0
+*/
+public IASTImageLocation getImageLocation();
 }
@@ -1,5 +1,5 @@
 /*******************************************************************************
-* Copyright (c) 2004, 2005 IBM Corporation and others.
+* Copyright (c) 2004, 2007 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
@@ -7,6 +7,7 @@
 *
 * Contributors:
 * IBM - Initial API and implementation
+* Markus Schorn (Wind River Systems)
 *******************************************************************************/
 package org.eclipse.cdt.core.dom.ast;

@@ -46,6 +47,13 @@ public interface IASTPreprocessorMacroDefinition extends
 */
 public String getExpansion();

+/**
+* Returns the location of the macro expansion, or <code>null</code> if not supported.
+* For built-in macros the location will always be null.
+* @since 5.0
+*/
+public IASTFileLocation getExpansionLocation();
+
 /**
 * Set the macro expansion.
 *
@@ -23,6 +23,7 @@ import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTNode;
 import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
 import org.eclipse.cdt.core.dom.ast.c.CASTVisitor;
+import org.eclipse.cdt.core.dom.parser.AbstractScannerExtensionConfiguration;
 import org.eclipse.cdt.core.dom.parser.IScannerExtensionConfiguration;
 import org.eclipse.cdt.core.dom.parser.ISourceCodeParser;
 import org.eclipse.cdt.core.index.IIndex;
@@ -39,6 +40,7 @@ import org.eclipse.cdt.core.parser.ParserLanguage;
 import org.eclipse.cdt.core.parser.ParserMode;
 import org.eclipse.cdt.core.parser.util.CharArrayIntMap;
 import org.eclipse.cdt.internal.core.dom.parser.c.GNUCSourceParser;
+import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
 import org.eclipse.cdt.internal.core.parser.scanner2.DOMScanner;
 import org.eclipse.cdt.internal.core.parser.token.KeywordSets;
 import org.eclipse.cdt.internal.core.pdom.dom.IPDOMLinkageFactory;
@@ -58,7 +60,6 @@
 * </p>
 *
 * @see AbstractScannerExtensionConfiguration
-* @see AbstractCParserExtensionConfiguration
 *
 * @since 4.0
 */
@@ -98,6 +99,8 @@ public abstract class AbstractCLanguage extends AbstractLanguage implements ICLa

 IScanner scanner= createScanner(reader, scanInfo, codeReaderFactory, log);
 scanner.setScanComments((options & OPTION_ADD_COMMENTS) != 0);
+scanner.setComputeImageLocations((options & AbstractLanguage.OPTION_NO_IMAGE_LOCATIONS) == 0);
+
 ISourceCodeParser parser= createParser(scanner, log, index, false, options);

 // Parse
@@ -149,6 +152,9 @@
 * @return an instance of IScanner
 */
 protected IScanner createScanner(CodeReader reader, IScannerInfo scanInfo, ICodeReaderFactory fileCreator, IParserLogService log) {
+if (CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"))) { //$NON-NLS-1$
+return new CPreprocessor(reader, scanInfo, ParserLanguage.C, log, getScannerExtensionConfiguration(), fileCreator);
+}
 return new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.C,
 log, getScannerExtensionConfiguration(), fileCreator);
 }
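Both the C and the C++ language classes now choose the scanner at run time: the new CPreprocessor is used only when the "scanner" system property equals CPreprocessor.PROP_VALUE (the literal value of that constant is not shown in this excerpt); otherwise the existing DOMScanner remains the default. A condensed sketch of the switch, C side:

    protected IScanner createScanner(CodeReader reader, IScannerInfo scanInfo,
            ICodeReaderFactory fileCreator, IParserLogService log) {
        // Opt-in via -Dscanner=<value of CPreprocessor.PROP_VALUE> on the VM command line.
        if (CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"))) { //$NON-NLS-1$
            return new CPreprocessor(reader, scanInfo, ParserLanguage.C, log,
                    getScannerExtensionConfiguration(), fileCreator);
        }
        // Default: keep the old DOMScanner.
        return new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.C,
                log, getScannerExtensionConfiguration(), fileCreator);
    }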
@@ -39,6 +39,7 @@ import org.eclipse.cdt.core.parser.ParserLanguage;
 import org.eclipse.cdt.core.parser.ParserMode;
 import org.eclipse.cdt.core.parser.util.CharArrayIntMap;
 import org.eclipse.cdt.internal.core.dom.parser.cpp.GNUCPPSourceParser;
+import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
 import org.eclipse.cdt.internal.core.parser.scanner2.DOMScanner;
 import org.eclipse.cdt.internal.core.parser.token.KeywordSets;
 import org.eclipse.cdt.internal.core.pdom.dom.IPDOMLinkageFactory;
@@ -147,6 +148,9 @@ public abstract class AbstractCPPLanguage extends AbstractLanguage implements IC
 * @return an instance of IScanner
 */
 protected IScanner createScanner(CodeReader reader, IScannerInfo scanInfo, ICodeReaderFactory fileCreator, IParserLogService log) {
+if (CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"))) { //$NON-NLS-1$
+return new CPreprocessor(reader, scanInfo, ParserLanguage.CPP, log, getScannerExtensionConfiguration(), fileCreator);
+}
 return new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.CPP,
 log, getScannerExtensionConfiguration(), fileCreator);
 }
@@ -42,6 +42,13 @@ public interface IScanner extends IMacroCollector {
 */
 public void setScanComments(boolean val);

+/**
+* Turns on/off creation of image locations.
+* @see IASTName#getImageLocation().
+* @since 5.0
+*/
+public void setComputeImageLocations(boolean val);
+
 public IMacro addDefinition(char[] key, char[] value);
 public IMacro addDefinition(char[] name, char[][] params, char[] expansion);
 public void addDefinition(IMacro macro);
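A language implementation translates the option bits into these scanner switches before parsing, as the AbstractCLanguage hunk above does; in short (a sketch, variables as in that hunk):

    IScanner scanner = createScanner(reader, scanInfo, codeReaderFactory, log);
    scanner.setScanComments((options & AbstractLanguage.OPTION_ADD_COMMENTS) != 0);
    scanner.setComputeImageLocations((options & AbstractLanguage.OPTION_NO_IMAGE_LOCATIONS) == 0);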
@@ -242,8 +242,9 @@ public interface IToken {
 static public final int tBLOCKCOMMENT = 143;
 /** @deprecated don't use it */
 static public final int tLAST = 143;
+static public final int tEND_OF_INPUT= 144;

-int FIRST_RESERVED_IGCCToken = 144;
+int FIRST_RESERVED_IGCCToken = 150;
 int LAST_RESERVED_IGCCToken = 199;

 int FIRST_RESERVED_IExtensionToken = 243;
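tEND_OF_INPUT lets callers poll for the end of input instead of relying on an exception, which is how the LexerTests and AsmModelBuilder hunks above use it. A minimal sketch; fLexer and handle are placeholders, and the surrounding method is assumed to declare the lexer's checked exceptions:

    // Drain tokens until the lexer reports the new end-of-input token type.
    IToken t = fLexer.nextToken();
    while (t.getType() != IToken.tEND_OF_INPUT) {
        handle(t);              // placeholder for real token processing
        t = fLexer.nextToken();
    }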
@@ -14,6 +14,7 @@ package org.eclipse.cdt.internal.core.dom.parser;
 import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
 import org.eclipse.cdt.core.dom.ast.ASTVisitor;
 import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
+import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
 import org.eclipse.cdt.core.dom.ast.IASTNode;
 import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
 import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
@@ -97,6 +98,17 @@ public abstract class ASTNode implements IASTNode {
 return locations;
 }

+public IASTImageLocation getImageLocation() {
+final IASTTranslationUnit tu= getTranslationUnit();
+if (tu != null) {
+ILocationResolver l= (ILocationResolver) tu.getAdapter(ILocationResolver.class);
+if (l != null) {
+return l.getImageLocation(offset, length);
+}
+}
+return null;
+}
+
 public String getRawSignature() {
 final IASTFileLocation floc= getFileLocation();
 final IASTTranslationUnit ast = getTranslationUnit();
@@ -106,7 +118,7 @@
 return new String(lr.getUnpreprocessedSignature(getFileLocation()));
 }
 else {
-// mstodo- support for old location map
+// mstodo- old location resolver, remove
 return ast.getUnpreprocessedSignature(getNodeLocations());
 }
 }
@@ -120,6 +132,9 @@
 public IASTFileLocation getFileLocation() {
 if( fileLocation != null )
 return fileLocation;
+if (offset == 0 && length == 0) {
+return null;
+}
 IASTTranslationUnit ast = getTranslationUnit();
 if (ast != null) {
 ILocationResolver lr= (ILocationResolver) ast.getAdapter(ILocationResolver.class);
@@ -154,7 +154,7 @@ public abstract class AbstractGNUSourceCodeParser implements ISourceCodeParser {

 // Use to create the completion node
 protected ASTCompletionNode createCompletionNode(IToken token) {
-if (completionNode == null)
+if (completionNode == null && token != null)
 completionNode = new ASTCompletionNode(token, getTranslationUnit());
 return completionNode;
 }
@@ -284,6 +284,7 @@
 OffsetLimitReachedException exception) throws EndOfFileException {
 if (mode != ParserMode.COMPLETION_PARSE)
 throw new EndOfFileException();
+createCompletionNode(exception.getFinalToken());
 throw exception;
 }

@@ -409,7 +409,7 @@ public class CASTTranslationUnit extends CASTNode implements
 return result;
 }

-// mstodo- support for old location resolver
+// mstodo- old location resolver remove
 IASTNode node = null;
 ASTPreprocessorSelectionResult result = null;
 int globalOffset = 0;
@@ -399,7 +399,7 @@ public class CPPASTTranslationUnit extends CPPASTNode implements
 return result;
 }

-// mstodo- support for old location resolver
+// mstodo- old location resolver, remove
 IASTNode node = null;
 ASTPreprocessorSelectionResult result = null;
 int globalOffset = 0;
@@ -14,6 +14,7 @@ import org.eclipse.cdt.core.dom.ILinkage;
 import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
 import org.eclipse.cdt.core.dom.ast.IASTCompletionContext;
 import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
+import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
 import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTNode;
 import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
@@ -115,10 +116,6 @@ class ASTBuiltinName extends ASTPreprocessorDefinition {
 return new IASTNodeLocation[]{fFileLocation};
 }

-public int getOffset() {
-throw new UnsupportedOperationException();
-}
-
 public String getRawSignature() {
 if (fFileLocation == null) {
 throw new UnsupportedOperationException();
@@ -128,13 +125,27 @@ class ASTBuiltinName extends ASTPreprocessorDefinition {
 }

 class ASTMacroReferenceName extends ASTPreprocessorName {
+private ImageLocationInfo fImageLocationInfo;
+
 public ASTMacroReferenceName(IASTNode parent, int offset, int endOffset, IMacroBinding macro, ImageLocationInfo imgLocationInfo) {
 super(parent, IASTTranslationUnit.EXPANSION_NAME, offset, endOffset, macro.getNameCharArray(), macro);
+fImageLocationInfo= imgLocationInfo;
 }

 public boolean isReference() {
 return true;
 }

-// mstodo- image-locations.
+public IASTImageLocation getImageLocation() {
+if (fImageLocationInfo != null) {
+IASTTranslationUnit tu= getTranslationUnit();
+if (tu != null) {
+LocationMap lr= (LocationMap) tu.getAdapter(LocationMap.class);
+if (lr != null) {
+return fImageLocationInfo.createLocation(lr, fImageLocationInfo);
+}
+}
+}
+return null;
+}
 }
@@ -16,6 +16,7 @@ import org.eclipse.cdt.core.dom.ast.ASTNodeProperty;
 import org.eclipse.cdt.core.dom.ast.IASTComment;
 import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
 import org.eclipse.cdt.core.dom.ast.IASTFunctionStyleMacroParameter;
+import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
 import org.eclipse.cdt.core.dom.ast.IASTMacroExpansion;
 import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTNode;
@@ -67,10 +68,6 @@ abstract class ASTPreprocessorNode extends ASTNode {
 return CharArrayUtils.EMPTY;
 }

-public IASTNodeLocation[] getNodeLocations() {
-return super.getNodeLocations();
-}
-
 public String getContainingFilename() {
 if (super.getOffset() == -1) {
 throw new UnsupportedOperationException();
@@ -228,9 +225,10 @@ class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreproces
 private final boolean fIsResolved;
 private final boolean fIsSystemInclude;

-public ASTInclusionStatement(IASTTranslationUnit parent, int startNumber, int nameStartNumber, int nameEndNumber,
+public ASTInclusionStatement(IASTTranslationUnit parent,
+int startNumber, int nameStartNumber, int nameEndNumber, int endNumber,
 char[] headerName, String filePath, boolean userInclude, boolean active) {
-super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, nameEndNumber);
+super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
 fName= new ASTPreprocessorName(this, IASTPreprocessorIncludeStatement.INCLUDE_NAME, nameStartNumber, nameEndNumber, headerName, null);
 fPath= filePath == null ? "" : filePath; //$NON-NLS-1$
 fIsActive= active;
@@ -270,6 +268,7 @@

 class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorObjectStyleMacroDefinition {
 private final ASTPreprocessorName fName;
+private final int fExpansionNumber;

 /**
 * Regular constructor.
@@ -277,6 +276,7 @@ class ASTMacro extends ASTPreprocessorNode implements IASTPreprocessorObjectStyl
 public ASTMacro(IASTTranslationUnit parent, IMacroBinding macro,
 int startNumber, int nameNumber, int nameEndNumber, int expansionNumber, int endNumber) {
 super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber);
+fExpansionNumber= expansionNumber;
 fName= new ASTPreprocessorDefinition(this, IASTPreprocessorMacroDefinition.MACRO_NAME, nameNumber, nameEndNumber, macro.getNameCharArray(), macro);
 }

@@ -287,6 +287,7 @@
 public ASTMacro(IASTTranslationUnit parent, IMacroBinding macro, String filename, int nameOffset, int nameEndOffset, int expansionOffset) {
 super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, -1, -1);
 fName= new ASTBuiltinName(this, IASTPreprocessorMacroDefinition.MACRO_NAME, filename, nameOffset, nameEndOffset, macro.getNameCharArray(), macro);
+fExpansionNumber= -1;
 }

 protected IMacroBinding getMacro() {
@@ -316,6 +317,19 @@

 public void setExpansion(String exp) {assert false;}
 public void setName(IASTName name) {assert false;}
+
+public IASTFileLocation getExpansionLocation() {
+if (fExpansionNumber >= 0) {
+IASTTranslationUnit ast = getTranslationUnit();
+if (ast != null) {
+ILocationResolver lr= (ILocationResolver) ast.getAdapter(ILocationResolver.class);
+if (lr != null) {
+return lr.getMappedFileLocation(fExpansionNumber, getOffset() + getLength() - fExpansionNumber);
+}
+}
+}
+return null;
+}
 }

 class ASTMacroParameter extends ASTPreprocessorNode implements IASTFunctionStyleMacroParameter {
@@ -502,6 +516,10 @@ class ASTMacroExpansionLocation implements IASTMacroExpansion {
 public String toString() {
 return fContext.getMacroDefinition().getName().toString() + "[" + fOffset + "," + (fOffset+fLength) + ")"; //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
 }
+
+public IASTImageLocation getImageLocation() {
+return fContext.getImageLocation(fOffset, fLength);
+}
 }

 class ASTFileLocationForBuiltins implements IASTFileLocation {
@@ -541,3 +559,16 @@ class ASTFileLocationForBuiltins implements IASTFileLocation {
 }

+
+class ASTImageLocation extends ASTFileLocationForBuiltins implements IASTImageLocation {
+private final int fKind;
+
+public ASTImageLocation(int kind, String file, int offset, int length) {
+super(file, offset, length);
+fKind= kind;
+}
+
+public int getLocationKind() {
+return fKind;
+}
+}
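With ASTMacro now remembering the expansion's sequence number, a macro definition node can report where its replacement text sits in the file. A hedged client-side sketch; macroDefinition is a placeholder IASTPreprocessorMacroDefinition:

    IASTFileLocation expansion = macroDefinition.getExpansionLocation();
    if (expansion != null) {
        // null is returned for built-in macros and when no location resolver is available
        System.out.println("expansion at offset " + expansion.getNodeOffset()
                + ", length " + expansion.getNodeLength());
    }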
@ -168,14 +168,13 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
private final MacroExpander fMacroExpander;
|
private final MacroExpander fMacroExpander;
|
||||||
|
|
||||||
// configuration
|
// configuration
|
||||||
final private ParserLanguage fLanguage;
|
|
||||||
final private LexerOptions fLexOptions= new LexerOptions();
|
final private LexerOptions fLexOptions= new LexerOptions();
|
||||||
private boolean fCheckNumbers;
|
|
||||||
final private char[] fAdditionalNumericLiteralSuffixes;
|
final private char[] fAdditionalNumericLiteralSuffixes;
|
||||||
final private CharArrayIntMap fKeywords;
|
final private CharArrayIntMap fKeywords;
|
||||||
final private CharArrayIntMap fPPKeywords;
|
final private CharArrayIntMap fPPKeywords;
|
||||||
final private String[] fIncludePaths;
|
final private String[] fIncludePaths;
|
||||||
final private String[] fQuoteIncludePaths;
|
final private String[] fQuoteIncludePaths;
|
||||||
|
private String[][] fPreIncludedFiles= null;
|
||||||
|
|
||||||
private int fContentAssistLimit= -1;
|
private int fContentAssistLimit= -1;
|
||||||
|
|
||||||
|
@ -193,15 +192,12 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
|
|
||||||
private boolean isCancelled = false;
|
private boolean isCancelled = false;
|
||||||
|
|
||||||
private Token fPrefetchedToken;
|
private Token fPrefetchedTokens;
|
||||||
private Token fLastToken;
|
private Token fLastToken;
|
||||||
private boolean fExpandingMacro;
|
|
||||||
|
|
||||||
public CPreprocessor(CodeReader reader, IScannerInfo info, ParserLanguage language, IParserLogService log,
|
public CPreprocessor(CodeReader reader, IScannerInfo info, ParserLanguage language, IParserLogService log,
|
||||||
IScannerExtensionConfiguration configuration, ICodeReaderFactory readerFactory) {
|
IScannerExtensionConfiguration configuration, ICodeReaderFactory readerFactory) {
|
||||||
fLanguage= language;
|
|
||||||
fLog = log;
|
fLog = log;
|
||||||
fCheckNumbers= true;
|
|
||||||
fAdditionalNumericLiteralSuffixes= nonNull(configuration.supportAdditionalNumericLiteralSuffixes());
|
fAdditionalNumericLiteralSuffixes= nonNull(configuration.supportAdditionalNumericLiteralSuffixes());
|
||||||
fLexOptions.fSupportDollarInitializers= configuration.support$InIdentifiers();
|
fLexOptions.fSupportDollarInitializers= configuration.support$InIdentifiers();
|
||||||
fLexOptions.fSupportMinAndMax = configuration.supportMinAndMaxOperators();
|
fLexOptions.fSupportMinAndMax = configuration.supportMinAndMaxOperators();
|
||||||
|
@ -217,28 +213,28 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
fMacroExpander= new MacroExpander(this, fMacroDictionary, fLocationMap, fMacroDefinitionParser, fLexOptions);
|
fMacroExpander= new MacroExpander(this, fMacroDictionary, fLocationMap, fMacroDefinitionParser, fLexOptions);
|
||||||
fCodeReaderFactory= readerFactory;
|
fCodeReaderFactory= readerFactory;
|
||||||
|
|
||||||
setupMacroDictionary(configuration, info);
|
setupMacroDictionary(configuration, info, language);
|
||||||
|
|
||||||
final String filePath= new String(reader.filename);
|
final String filePath= new String(reader.filename);
|
||||||
fAllIncludedFiles.add(filePath);
|
fAllIncludedFiles.add(filePath);
|
||||||
ILocationCtx ctx= fLocationMap.pushTranslationUnit(filePath, reader.buffer);
|
ILocationCtx ctx= fLocationMap.pushTranslationUnit(filePath, reader.buffer);
|
||||||
fRootLexer= new Lexer(reader.buffer, (LexerOptions) fLexOptions.clone(), this, this);
|
fRootLexer= new Lexer(reader.buffer, fLexOptions, this, this);
|
||||||
fRootContext= fCurrentContext= new ScannerContextFile(ctx, null, fRootLexer);
|
fRootContext= fCurrentContext= new ScannerContext(ctx, null, fRootLexer);
|
||||||
if (info instanceof IExtendedScannerInfo) {
|
if (info instanceof IExtendedScannerInfo) {
|
||||||
final IExtendedScannerInfo einfo= (IExtendedScannerInfo) info;
|
final IExtendedScannerInfo einfo= (IExtendedScannerInfo) info;
|
||||||
|
fPreIncludedFiles= new String[][] {einfo.getMacroFiles(), einfo.getIncludeFiles()};
|
||||||
// files provided on command line (-imacros, -include)
|
|
||||||
registerPreIncludedFiles(einfo.getMacroFiles(), einfo.getIncludeFiles());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void setComputeImageLocations(boolean val) {
|
||||||
|
fLexOptions.fCreateImageLocations= val;
|
||||||
|
}
|
||||||
|
|
||||||
public void setContentAssistMode(int offset) {
|
public void setContentAssistMode(int offset) {
|
||||||
fContentAssistLimit= offset;
|
fContentAssistLimit= offset;
|
||||||
fRootLexer.setContentAssistMode(offset);
|
fRootLexer.setContentAssistMode(offset);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// mstodo scanner integration, keywords should be provided directly by the language
|
|
||||||
private void configureKeywords(ParserLanguage language, IScannerExtensionConfiguration configuration) {
|
private void configureKeywords(ParserLanguage language, IScannerExtensionConfiguration configuration) {
|
||||||
Keywords.addKeywordsPreprocessor(fPPKeywords);
|
Keywords.addKeywordsPreprocessor(fPPKeywords);
|
||||||
if (language == ParserLanguage.C) {
|
if (language == ParserLanguage.C) {
|
||||||
|
@ -279,7 +275,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
return info.getIncludePaths();
|
return info.getIncludePaths();
|
||||||
}
|
}
|
||||||
|
|
||||||
private void setupMacroDictionary(IScannerExtensionConfiguration config, IScannerInfo info) {
|
private void setupMacroDictionary(IScannerExtensionConfiguration config, IScannerInfo info, ParserLanguage lang) {
|
||||||
// built in macros
|
// built in macros
|
||||||
fMacroDictionary.put(__STDC__.getNameCharArray(), __STDC__);
|
fMacroDictionary.put(__STDC__.getNameCharArray(), __STDC__);
|
||||||
fMacroDictionary.put(__FILE__.getNameCharArray(), __FILE__);
|
fMacroDictionary.put(__FILE__.getNameCharArray(), __FILE__);
|
||||||
|
@ -287,7 +283,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
fMacroDictionary.put(__TIME__.getNameCharArray(), __TIME__);
|
fMacroDictionary.put(__TIME__.getNameCharArray(), __TIME__);
|
||||||
fMacroDictionary.put(__LINE__.getNameCharArray(), __LINE__);
|
fMacroDictionary.put(__LINE__.getNameCharArray(), __LINE__);
|
||||||
|
|
||||||
if (fLanguage == ParserLanguage.CPP)
|
if (lang == ParserLanguage.CPP)
|
||||||
fMacroDictionary.put(__cplusplus.getNameCharArray(), __cplusplus);
|
fMacroDictionary.put(__cplusplus.getNameCharArray(), __cplusplus);
|
||||||
else {
|
else {
|
||||||
fMacroDictionary.put(__STDC_HOSTED__.getNameCharArray(), __STDC_HOSTED__);
|
fMacroDictionary.put(__STDC_HOSTED__.getNameCharArray(), __STDC_HOSTED__);
|
||||||
|
@ -322,18 +318,31 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private void registerPreIncludedFiles(final String[] macroFiles, final String[] preIncludedFiles) {
|
private void handlePreIncludedFiles() {
|
||||||
if (preIncludedFiles != null && preIncludedFiles.length > 0) {
|
final String[] imacro= fPreIncludedFiles[0];
|
||||||
final char[] buffer= createSyntheticFile(preIncludedFiles);
|
if (imacro != null && imacro.length > 0) {
|
||||||
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, false);
|
final char[] buffer= createSyntheticFile(imacro);
|
||||||
fCurrentContext= new ScannerContextFile(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (macroFiles != null && macroFiles.length > 0) {
|
|
||||||
final char[] buffer= createSyntheticFile(macroFiles);
|
|
||||||
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, true);
|
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, true);
|
||||||
fCurrentContext= new ScannerContextMacroFile(this, ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
fCurrentContext= new ScannerContext(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
||||||
|
ScannerContext preCtx= fCurrentContext;
|
||||||
|
try {
|
||||||
|
while(internalFetchToken(true, false, true, preCtx).getType() != IToken.tEND_OF_INPUT) {
|
||||||
|
// just eat the tokens
|
||||||
}
|
}
|
||||||
|
final ILocationCtx locationCtx = fCurrentContext.getLocationCtx();
|
||||||
|
fLocationMap.popContext(locationCtx);
|
||||||
|
fCurrentContext= fCurrentContext.getParent();
|
||||||
|
assert fCurrentContext == fRootContext;
|
||||||
|
} catch (OffsetLimitReachedException e) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
final String[] include= fPreIncludedFiles[1];
|
||||||
|
if (include != null && include.length > 0) {
|
||||||
|
final char[] buffer= createSyntheticFile(include);
|
||||||
|
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, false);
|
||||||
|
fCurrentContext= new ScannerContext(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
|
||||||
|
}
|
||||||
|
fPreIncludedFiles= null;
|
||||||
}
|
}
|
||||||
|
|
||||||
 	private char[] createSyntheticFile(String[] files) {
@@ -356,19 +365,6 @@ public class CPreprocessor implements ILexerLog, IScanner {
 		return buffer;
 	}
 
-
-	/**
-	 * Check if the given inclusion was already included before.
-	 *
-	 * @param inclusionData
-	 * @return
-	 */
-//	private boolean isRepeatedInclusion(InclusionData inclusionData) {
-//		return includedFiles.containsKey(inclusionData.reader.filename);
-//	}
-
 	public PreprocessorMacro addMacroDefinition(char[] key, char[] value) {
 		final Lexer lex= new Lexer(key, fLexOptions, LEXERLOG_NULL, null);
 		try {
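addMacroDefinition(char[], char[]) receives a macro key (possibly including a parameter list) and a replacement text, the shape produced by -D style options. A hedged sketch of splitting such an option into the two parts; the default value of 1 follows common preprocessor convention and is an assumption about the callers:

```java
// Hedged sketch: splits "NAME", "NAME=VALUE" or "NAME(A,B)=VALUE" into the
// key/value pair that addMacroDefinition(char[], char[]) expects.
class MacroOptionSketch {
	static String[] split(String option) {
		int eq = option.indexOf('=');
		if (eq < 0) {
			return new String[] { option, "1" }; // common -D default, assumed here
		}
		return new String[] { option.substring(0, eq), option.substring(eq + 1) };
	}
}
```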
@@ -411,27 +407,78 @@ public class CPreprocessor implements ILexerLog, IScanner {
 	}
 
 	/**
-	 * Returns next token for the parser.
-	 * @throws OffsetLimitReachedException
+	 * Returns the next token from the preprocessor without concatenating string literals.
+	 */
+	private Token fetchToken() throws OffsetLimitReachedException {
+		if (fPreIncludedFiles != null) {
+			handlePreIncludedFiles();
+		}
+		Token t= fPrefetchedTokens;
+		if (t != null) {
+			fPrefetchedTokens= (Token) t.getNext();
+			t.setNext(null);
+			return t;
+		}
+
+		t= internalFetchToken(true, false, true, fRootContext);
+		final int offset= fLocationMap.getSequenceNumberForOffset(t.getOffset());
+		final int endOffset= fLocationMap.getSequenceNumberForOffset(t.getEndOffset());
+		t.setOffset(offset, endOffset);
+		t.setNext(null);
+		return t;
+	}
+
+	private void pushbackToken(Token t) {
+		t.setNext(fPrefetchedTokens);
+		fPrefetchedTokens= t;
+	}
+
+	/**
+	 * Returns next token for the parser. String literals are not concatenated. When
+	 * the end is reached tokens with type {@link IToken#tEND_OF_INPUT}.
+	 * @throws OffsetLimitReachedException see {@link Lexer}.
+	 */
+	public IToken nextTokenRaw() throws OffsetLimitReachedException {
+		if (isCancelled) {
+			throw new ParseError(ParseError.ParseErrorKind.TIMEOUT_OR_CANCELLED);
+		}
+
+		Token t1= fetchToken();
+		if (t1.getType() == IToken.tEND_OF_INPUT) {
+			if (fContentAssistLimit >= 0) {
+				int useType= IToken.tCOMPLETION;
+				if (fLastToken != null) {
+					final int lt= fLastToken.getType();
+					if (lt == IToken.tCOMPLETION || lt == IToken.tEOC) {
+						useType= IToken.tEOC;
+					}
+				}
+				int sequenceNumber= fLocationMap.getSequenceNumberForOffset(fContentAssistLimit);
+				t1= new Token(useType, null, sequenceNumber, sequenceNumber);
+			}
+		}
+		if (fLastToken != null) {
+			fLastToken.setNext(t1);
+		}
+		fLastToken= t1;
+		return t1;
+	}
+
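fetchToken() and pushbackToken(Token) implement a small prefetch list: pushed-back tokens are delivered again before any new token is read from the scanner contexts. A generic sketch of the same pattern, using a deque instead of the intrusive next-pointers of Token:

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;

// Generic sketch of the prefetch/pushback pattern behind fetchToken() and
// pushbackToken(): pushed-back items are re-delivered before new ones are read.
class PushbackSketch<T> {
	private final Deque<T> pushedBack = new ArrayDeque<T>();
	private final Iterator<T> source;

	PushbackSketch(Iterator<T> source) {
		this.source = source;
	}

	T fetch() {
		if (!pushedBack.isEmpty()) {
			return pushedBack.pop();   // re-deliver the most recently pushed-back item
		}
		return source.next();          // otherwise pull a fresh item from the source
	}

	void pushback(T t) {
		pushedBack.push(t);            // returned by the next call to fetch()
	}
}
```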
+	/**
+	 * Returns next token for the parser. String literals are concatenated.
+	 * @throws EndOfFileException when the end of the translation unit has been reached.
+	 * @throws OffsetLimitReachedException see {@link Lexer}.
 	 */
 	public IToken nextToken() throws EndOfFileException {
 		if (isCancelled) {
 			throw new ParseError(ParseError.ParseErrorKind.TIMEOUT_OR_CANCELLED);
 		}
 
-		// use prefetched token or get a new one.
-		Token t1= fPrefetchedToken;
-		if (t1 == null) {
-			t1= fetchTokenFromPreprocessor();
-			adjustOffsets(t1);
-		}
-		else {
-			fPrefetchedToken= null;
-		}
+		Token t1= fetchToken();
 
 		final int tt1= t1.getType();
 		switch(tt1) {
-		case Lexer.tEND_OF_INPUT:
+		case IToken.tEND_OF_INPUT:
 			if (fContentAssistLimit < 0) {
 				throw new EndOfFileException();
 			}
@@ -453,8 +500,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
 		StringBuffer buf= null;
 		int endOffset= 0;
 		loop: while(true) {
-			t2= fetchTokenFromPreprocessor();
-			adjustOffsets(t2);
+			t2= fetchToken();
 			final int tt2= t2.getType();
 			switch(tt2) {
 			case IToken.tLSTRING:
@@ -473,7 +519,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
 				break loop;
 			}
 		}
-		fPrefetchedToken= t2;
+		pushbackToken(t2);
 		if (buf != null) {
 			char[] image= new char[buf.length() + (isWide ? 3 : 2)];
 			int off= -1;
@@ -494,78 +540,63 @@ public class CPreprocessor implements ILexerLog, IScanner {
 		return t1;
 	}
 
-	private void adjustOffsets(Token t1) {
-		final int offset= fLocationMap.getSequenceNumberForOffset(t1.getOffset());
-		final int endOffset= fLocationMap.getSequenceNumberForOffset(t1.getEndOffset());
-		t1.setOffset(offset, endOffset);
-		t1.setNext(null);
-	}
-
 	private void appendStringContent(StringBuffer buf, Token t1) {
 		final char[] image= t1.getCharImage();
 		final int start= image[0]=='"' ? 1 : 2;
 		buf.append(image, start, image.length-start-1);
 	}
 
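nextToken() now performs string-literal concatenation on top of fetchToken(): adjacent string tokens are merged into one image and the first non-string token is pushed back for the next call. A simplified sketch of the merging step, working on token images rather than Token objects:

```java
import java.util.List;

// Simplified sketch of translation-phase-6 concatenation: merge the images of
// adjacent string literals ("foo" L"bar" -> L"foobar"), preserving wideness.
class StringConcatenationSketch {
	static String concatenate(List<String> images) {
		boolean wide = false;
		StringBuilder content = new StringBuilder();
		for (String image : images) {
			wide |= image.charAt(0) == 'L';
			int start = image.charAt(0) == '"' ? 1 : 2;       // skip " or L"
			content.append(image, start, image.length() - 1); // drop the closing "
		}
		return (wide ? "L\"" : "\"") + content + "\"";
	}
}
```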
-	/**
-	 * Checks if the current token is a left parenthesis, newlines will be ignored.
-	 * No preprocessing is performed.
-	 */
-	boolean findLParenthesisInContext() throws OffsetLimitReachedException {
-		Token t= fCurrentContext.currentLexerToken();
-		while(t.getType() == Lexer.tNEWLINE) {
-			t= fCurrentContext.nextPPToken();
-		}
-		return t.getType() == IToken.tLPAREN;
-	}
-
-	Token fetchTokenFromPreprocessor() throws OffsetLimitReachedException {
+	Token internalFetchToken(final boolean expandMacros, final boolean stopAtNewline,
+			final boolean checkNumbers, final ScannerContext uptoEndOfCtx) throws OffsetLimitReachedException {
 		++fTokenCount;
 		Token ppToken= fCurrentContext.currentLexerToken();
 		while(true) {
 			switch(ppToken.getType()) {
 			case Lexer.tBEFORE_INPUT:
+				ppToken= fCurrentContext.nextPPToken();
+				continue;
+
 			case Lexer.tNEWLINE:
+				if (stopAtNewline) {
+					return ppToken;
+				}
 				ppToken= fCurrentContext.nextPPToken();
 				continue;
 
 			case Lexer.tOTHER_CHARACTER:
-				if (!fExpandingMacro) {
 				handleProblem(IProblem.SCANNER_BAD_CHARACTER, ppToken.getCharImage(),
 						ppToken.getOffset(), ppToken.getEndOffset());
 				ppToken= fCurrentContext.nextPPToken();
 				continue;
-				}
-				break;
 
-			case Lexer.tEND_OF_INPUT:
-				final ILocationCtx locationCtx = fCurrentContext.getLocationCtx();
-				if (locationCtx != null) {
-					fLocationMap.popContext(locationCtx);
-				}
-				fCurrentContext= fCurrentContext.getParent();
-				if (fCurrentContext == null) {
-					fCurrentContext= fRootContext;
+			case IToken.tEND_OF_INPUT:
+				if (fCurrentContext == uptoEndOfCtx || uptoEndOfCtx == null) {
 					return ppToken;
 				}
+				final ILocationCtx locationCtx = fCurrentContext.getLocationCtx();
+				fLocationMap.popContext(locationCtx);
+				fCurrentContext= fCurrentContext.getParent();
+				assert fCurrentContext != null;
+
 				ppToken= fCurrentContext.currentLexerToken();
 				continue;
 
 			case IToken.tPOUND:
-				final Lexer lexer= fCurrentContext.getLexerForPPDirective();
-				if (lexer != null) {
+			{
+				final Lexer lexer= fCurrentContext.getLexer();
+				if (lexer != null && lexer.currentTokenIsFirstOnLine()) {
 					executeDirective(lexer, ppToken.getOffset());
 					ppToken= fCurrentContext.currentLexerToken();
 					continue;
 				}
 				break;
+			}
 
 			case IToken.tIDENTIFIER:
-				final boolean tryExpansion = !fExpandingMacro && fCurrentContext.expandsMacros();
-
 				fCurrentContext.nextPPToken(); // consume the identifier
-				if (tryExpansion && expandMacro(ppToken)) {
+				if (expandMacros) {
+					final Lexer lexer= fCurrentContext.getLexer();
+					if (lexer != null && expandMacro(ppToken, lexer, stopAtNewline)) {
 						ppToken= fCurrentContext.currentLexerToken();
 						continue;
 					}
@@ -575,16 +606,17 @@ public class CPreprocessor implements ILexerLog, IScanner {
 					if (tokenType != fKeywords.undefined) {
 						ppToken.setType(tokenType);
 					}
+				}
 				return ppToken;
 
 			case IToken.tINTEGER:
-				if (fCheckNumbers && !fExpandingMacro) {
+				if (checkNumbers) {
 					checkNumber(ppToken, false);
 				}
 				break;
 
 			case IToken.tFLOATINGPT:
-				if (fCheckNumbers) {
+				if (checkNumbers) {
 					checkNumber(ppToken, true);
 				}
 				break;
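internalFetchToken() replaces fetchTokenFromPreprocessor(): on end-of-input it pops back to the parent scanner context, but stops at the context it was asked to scan up to (uptoEndOfCtx), which is what lets handlePreIncludedFiles() and the directive-line scanning consume exactly one nested input. A reduced sketch of that popping rule, using plain deques in place of ScannerContext:

```java
import java.util.Deque;

// Reduced sketch of the end-of-input rule in internalFetchToken(): an empty
// nested context is popped and scanning resumes in the parent, unless the
// current context is the one the caller asked to scan up to.
class ContextPopSketch {
	static String next(Deque<Deque<String>> contexts, Deque<String> uptoEndOfCtx) {
		while (true) {
			Deque<String> current = contexts.peek();
			if (!current.isEmpty()) {
				return current.poll();                        // ordinary token
			}
			if (current == uptoEndOfCtx || contexts.size() == 1) {
				return null;                                  // end-of-input for this caller
			}
			contexts.pop();                                   // resume the parent context
		}
	}
}
```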
@ -822,12 +854,14 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
switch (ident.getType()) {
|
switch (ident.getType()) {
|
||||||
case IToken.tCOMPLETION:
|
case IToken.tCOMPLETION:
|
||||||
lexer.nextToken();
|
lexer.nextToken();
|
||||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, ident);
|
Token completionToken= new TokenWithImage(ident.getType(), null,
|
||||||
|
startOffset, ident.getEndOffset(), ("#" + ident.getImage()).toCharArray()); //$NON-NLS-1$
|
||||||
|
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, completionToken);
|
||||||
|
|
||||||
case Lexer.tNEWLINE:
|
case Lexer.tNEWLINE:
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case Lexer.tEND_OF_INPUT:
|
case IToken.tEND_OF_INPUT:
|
||||||
case IToken.tINTEGER:
|
case IToken.tINTEGER:
|
||||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||||
return;
|
return;
|
||||||
|
@ -836,8 +870,8 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
break;
|
break;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
int endOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||||
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE, ident.getCharImage(), startOffset, endOffset);
|
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE, ident.getCharImage(), startOffset, lexer.getLastEndOffset());
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -848,21 +882,10 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
switch (type) {
|
switch (type) {
|
||||||
case IPreprocessorDirective.ppImport:
|
case IPreprocessorDirective.ppImport:
|
||||||
case IPreprocessorDirective.ppInclude:
|
case IPreprocessorDirective.ppInclude:
|
||||||
if (fExpandingMacro) {
|
|
||||||
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
|
||||||
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE, name, startOffset, condEndOffset);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
executeInclude(lexer, startOffset, false, true);
|
executeInclude(lexer, startOffset, false, true);
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
case IPreprocessorDirective.ppInclude_next:
|
case IPreprocessorDirective.ppInclude_next:
|
||||||
if (fExpandingMacro) {
|
|
||||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
executeInclude(lexer, startOffset, true, true);
|
executeInclude(lexer, startOffset, true, true);
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
case IPreprocessorDirective.ppDefine:
|
case IPreprocessorDirective.ppDefine:
|
||||||
executeDefine(lexer, startOffset);
|
executeDefine(lexer, startOffset);
|
||||||
|
@ -940,37 +963,25 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void executeInclude(final Lexer lexer, int poundOffset, boolean include_next, boolean active) throws OffsetLimitReachedException {
|
private void executeInclude(final Lexer lexer, int poundOffset, boolean include_next, boolean active) throws OffsetLimitReachedException {
|
||||||
char[] headerName= null;
|
|
||||||
boolean userInclude= true;
|
|
||||||
|
|
||||||
lexer.setInsideIncludeDirective(true);
|
lexer.setInsideIncludeDirective(true);
|
||||||
final Token header= lexer.nextToken();
|
final Token header= lexer.nextToken();
|
||||||
lexer.setInsideIncludeDirective(false);
|
lexer.setInsideIncludeDirective(false);
|
||||||
final int nameOffset= header.getOffset();
|
|
||||||
int nameEndOffset= header.getEndOffset();
|
int condEndOffset= header.getEndOffset();
|
||||||
int endOffset;
|
final int[] nameOffsets= new int[] {header.getOffset(), condEndOffset};
|
||||||
|
char[] headerName= null;
|
||||||
|
boolean userInclude= true;
|
||||||
|
|
||||||
switch(header.getType()) {
|
switch(header.getType()) {
|
||||||
case Lexer.tSYSTEM_HEADER_NAME:
|
case Lexer.tSYSTEM_HEADER_NAME:
|
||||||
userInclude= false;
|
userInclude= false;
|
||||||
char[] image= header.getCharImage();
|
headerName = extractHeaderName(header.getCharImage(), '<', '>', nameOffsets);
|
||||||
headerName= new char[image.length-2];
|
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||||
System.arraycopy(image, 1, headerName, 0, headerName.length);
|
|
||||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
|
||||||
endOffset= lexer.currentToken().getEndOffset();
|
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case Lexer.tQUOTE_HEADER_NAME:
|
case Lexer.tQUOTE_HEADER_NAME:
|
||||||
image= header.getCharImage();
|
headerName = extractHeaderName(header.getCharImage(), '"', '"', nameOffsets);
|
||||||
if (image.length <= 2) {
|
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||||
headerName= CharArrayUtils.EMPTY;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
headerName= new char[image.length-2];
|
|
||||||
System.arraycopy(image, 1, headerName, 0, headerName.length);
|
|
||||||
}
|
|
||||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
|
||||||
endOffset= lexer.currentToken().getEndOffset();
|
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case IToken.tCOMPLETION:
|
case IToken.tCOMPLETION:
|
||||||
|
@ -978,17 +989,15 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
|
|
||||||
case IToken.tIDENTIFIER:
|
case IToken.tIDENTIFIER:
|
||||||
TokenList tl= new TokenList();
|
TokenList tl= new TokenList();
|
||||||
nameEndOffset= getPreprocessedTokensOfLine(lexer, tl);
|
condEndOffset= nameOffsets[1]= getTokensWithinPPDirective(lexer, false, tl);
|
||||||
endOffset= lexer.currentToken().getEndOffset();
|
|
||||||
Token t= tl.first();
|
Token t= tl.first();
|
||||||
if (t != null) {
|
if (t != null) {
|
||||||
switch(t.getType()) {
|
switch(t.getType()) {
|
||||||
case IToken.tSTRING:
|
case IToken.tSTRING:
|
||||||
image= t.getCharImage();
|
headerName = extractHeaderName(t.getCharImage(), '"', '"', new int[]{0,0});
|
||||||
headerName= new char[image.length-2];
|
|
||||||
System.arraycopy(image, 1, headerName, 0, headerName.length);
|
|
||||||
break;
|
break;
|
||||||
case IToken.tLT:
|
case IToken.tLT:
|
||||||
|
userInclude= false;
|
||||||
boolean complete= false;
|
boolean complete= false;
|
||||||
StringBuffer buf= new StringBuffer();
|
StringBuffer buf= new StringBuffer();
|
||||||
t= (Token) t.getNext();
|
t= (Token) t.getNext();
|
||||||
|
@ -1000,23 +1009,22 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
buf.append(t.getImage());
|
buf.append(t.getImage());
|
||||||
t= (Token) t.getNext();
|
t= (Token) t.getNext();
|
||||||
}
|
}
|
||||||
if (!complete && fContentAssistLimit >= 0 && fCurrentContext == fRootContext) {
|
if (complete) {
|
||||||
throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, null);
|
|
||||||
}
|
|
||||||
headerName= new char[buf.length()];
|
headerName= new char[buf.length()];
|
||||||
buf.getChars(0, buf.length(), headerName, 0);
|
buf.getChars(0, buf.length(), headerName, 0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
endOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
condEndOffset= lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if (headerName == null || headerName.length==0) {
|
if (headerName == null || headerName.length==0) {
|
||||||
if (active) {
|
if (active) {
|
||||||
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE,
|
handleProblem(IProblem.PREPROCESSOR_INVALID_DIRECTIVE,
|
||||||
lexer.getInputChars(poundOffset, endOffset), poundOffset, nameEndOffset);
|
lexer.getInputChars(poundOffset, condEndOffset), poundOffset, condEndOffset);
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
@ -1031,13 +1039,13 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
if (!isCircularInclusion(path)) {
|
if (!isCircularInclusion(path)) {
|
||||||
reported= true;
|
reported= true;
|
||||||
fAllIncludedFiles.add(path);
|
fAllIncludedFiles.add(path);
|
||||||
ILocationCtx ctx= fLocationMap.pushInclusion(poundOffset, nameOffset, nameEndOffset, endOffset, reader.buffer, path, headerName, userInclude);
|
ILocationCtx ctx= fLocationMap.pushInclusion(poundOffset, nameOffsets[0], nameOffsets[1], condEndOffset, reader.buffer, path, headerName, userInclude);
|
||||||
ScannerContextFile fctx= new ScannerContextFile(ctx, fCurrentContext, new Lexer(reader.buffer, fLexOptions, this, this));
|
ScannerContext fctx= new ScannerContext(ctx, fCurrentContext, new Lexer(reader.buffer, fLexOptions, this, this));
|
||||||
fCurrentContext= fctx;
|
fCurrentContext= fctx;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
handleProblem(IProblem.PREPROCESSOR_INCLUSION_NOT_FOUND, headerName, poundOffset, nameEndOffset);
|
handleProblem(IProblem.PREPROCESSOR_INCLUSION_NOT_FOUND, headerName, poundOffset, condEndOffset);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
@ -1060,10 +1068,28 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!reported) {
|
if (!reported) {
|
||||||
fLocationMap.encounterPoundInclude(poundOffset, nameOffset, nameEndOffset, endOffset, headerName, path, !userInclude, active);
|
fLocationMap.encounterPoundInclude(poundOffset, nameOffsets[0], nameOffsets[1], condEndOffset, headerName, path, userInclude, active);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private char[] extractHeaderName(final char[] image, final char startDelim, final char endDelim, int[] offsets) {
|
||||||
|
char[] headerName;
|
||||||
|
int start= 0;
|
||||||
|
int length= image.length;
|
||||||
|
if (length > 0 && image[length-1] == endDelim) {
|
||||||
|
length--;
|
||||||
|
offsets[1]--;
|
||||||
|
if (length > 0 && image[0] == startDelim) {
|
||||||
|
offsets[0]++;
|
||||||
|
start++;
|
||||||
|
length--;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
headerName= new char[length];
|
||||||
|
System.arraycopy(image, start, headerName, 0, length);
|
||||||
|
return headerName;
|
||||||
|
}
|
||||||
|
|
||||||
private boolean isCircularInclusion(String filename) {
|
private boolean isCircularInclusion(String filename) {
|
||||||
ILocationCtx checkContext= fCurrentContext.getLocationCtx();
|
ILocationCtx checkContext= fCurrentContext.getLocationCtx();
|
||||||
while (checkContext != null) {
|
while (checkContext != null) {
|
||||||
|
@ -1141,7 +1167,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
boolean isActive= false;
|
boolean isActive= false;
|
||||||
TokenList condition= new TokenList();
|
TokenList condition= new TokenList();
|
||||||
final int condOffset= lexer.nextToken().getOffset();
|
final int condOffset= lexer.nextToken().getOffset();
|
||||||
final int condEndOffset= getPreprocessedTokensOfLine(lexer, condition);
|
final int condEndOffset= getTokensWithinPPDirective(lexer, true, condition);
|
||||||
final int endOffset= lexer.currentToken().getEndOffset();
|
final int endOffset= lexer.currentToken().getEndOffset();
|
||||||
|
|
||||||
if (condition.first() == null) {
|
if (condition.first() == null) {
|
||||||
|
@ -1166,28 +1192,38 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
/**
|
/**
|
||||||
* Runs the preprocessor on the rest of the line, storing the tokens in the holder supplied.
|
* Runs the preprocessor on the rest of the line, storing the tokens in the holder supplied.
|
||||||
* Macro expansion is reported to the location map.
|
* Macro expansion is reported to the location map.
|
||||||
* Returns the end-offset of the last token used from the input.
|
* In case isCondition is set to <code>true</code>, identifiers with image 'defined' are
|
||||||
|
* converted to the defined-token and its argument is not macro expanded.
|
||||||
|
* Returns the end-offset of the last token that was consumed.
|
||||||
*/
|
*/
|
||||||
private int getPreprocessedTokensOfLine(Lexer lexer, TokenList result) throws OffsetLimitReachedException {
|
private int getTokensWithinPPDirective(Lexer lexer, boolean isCondition, TokenList result) throws OffsetLimitReachedException {
|
||||||
final ScannerContext sctx= fCurrentContext;
|
final ScannerContext scannerCtx= fCurrentContext;
|
||||||
final ScannerContextPPDirective ppdCtx= new ScannerContextPPDirective(lexer, true);
|
boolean expandMacros= true;
|
||||||
fCurrentContext= ppdCtx;
|
loop: while(true) {
|
||||||
boolean cn= fCheckNumbers;
|
Token t= internalFetchToken(expandMacros, true, false, scannerCtx);
|
||||||
fCheckNumbers= false;
|
switch(t.getType()) {
|
||||||
try {
|
case IToken.tEND_OF_INPUT:
|
||||||
Token t= fetchTokenFromPreprocessor();
|
case IToken.tCOMPLETION:
|
||||||
while (t.getType() != Lexer.tEND_OF_INPUT) {
|
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE); // make sure the exception is thrown.
|
||||||
|
break loop;
|
||||||
|
case Lexer.tNEWLINE:
|
||||||
|
break loop;
|
||||||
|
case IToken.tIDENTIFIER:
|
||||||
|
if (isCondition && CharArrayUtils.equals(Keywords.cDEFINED, t.getCharImage())) {
|
||||||
|
t.setType(CPreprocessor.tDEFINED);
|
||||||
|
expandMacros= false;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case IToken.tLPAREN:
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
expandMacros= true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
result.append(t);
|
result.append(t);
|
||||||
t= fetchTokenFromPreprocessor();
|
|
||||||
}
|
}
|
||||||
// make sure an exception is thrown if we are running content assist at the end of the line
|
// make sure an exception is thrown if we are running content assist at the end of the line
|
||||||
lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
return lexer.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
|
||||||
return ppdCtx.getLastEndOffset();
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
fCurrentContext= sctx;
|
|
||||||
fCheckNumbers= cn;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void skipOverConditionalCode(final Lexer lexer, boolean takeElseBranch) throws OffsetLimitReachedException {
|
private void skipOverConditionalCode(final Lexer lexer, boolean takeElseBranch) throws OffsetLimitReachedException {
|
||||||
|
@ -1262,7 +1298,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
int condOffset= lexer.nextToken().getOffset();
|
int condOffset= lexer.nextToken().getOffset();
|
||||||
if (nesting == 0 && takeElseBranch) {
|
if (nesting == 0 && takeElseBranch) {
|
||||||
TokenList condition= new TokenList();
|
TokenList condition= new TokenList();
|
||||||
condEndOffset= getPreprocessedTokensOfLine(lexer, condition);
|
condEndOffset= getTokensWithinPPDirective(lexer, true, condition);
|
||||||
if (condition.first() != null) {
|
if (condition.first() != null) {
|
||||||
try {
|
try {
|
||||||
isActive= fExpressionEvaluator.evaluate(condition, fMacroDictionary);
|
isActive= fExpressionEvaluator.evaluate(condition, fMacroDictionary);
|
||||||
|
@ -1314,13 +1350,12 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
* Checks whether the identifier causes a macro expansion. May advance the current lexer
|
* Checks whether the identifier causes a macro expansion. May advance the current lexer
|
||||||
* to check for the opening bracket succeeding the identifier.
|
* to check for the opening bracket succeeding the identifier.
|
||||||
* <p>
|
* <p>
|
||||||
* If applicable the macro is expanded and the resulting tokens are put into a scanner context.
|
* If applicable the macro is expanded and the resulting tokens are put onto a new context.
|
||||||
* @param identifier the token where macro expansion may occur.
|
* @param identifier the token where macro expansion may occur.
|
||||||
* @param multiline whether we are allowed to check subsequent lines for macro arguments.
|
* @param lexer the input for the expansion.
|
||||||
* @return
|
* @param stopAtNewline whether or not tokens to be read are limited to the current line.
|
||||||
* @throws OffsetLimitReachedException
|
|
||||||
*/
|
*/
|
||||||
private boolean expandMacro(final Token identifier) throws OffsetLimitReachedException {
|
private boolean expandMacro(final Token identifier, Lexer lexer, boolean stopAtNewline) throws OffsetLimitReachedException {
|
||||||
final char[] name= identifier.getCharImage();
|
final char[] name= identifier.getCharImage();
|
||||||
PreprocessorMacro macro= (PreprocessorMacro) fMacroDictionary.get(name);
|
PreprocessorMacro macro= (PreprocessorMacro) fMacroDictionary.get(name);
|
||||||
if (macro == null) {
|
if (macro == null) {
|
||||||
|
@ -1328,23 +1363,25 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (macro instanceof FunctionStyleMacro) {
|
if (macro instanceof FunctionStyleMacro) {
|
||||||
if (!findLParenthesisInContext()) {
|
Token t= lexer.currentToken();
|
||||||
|
if (!stopAtNewline) {
|
||||||
|
while(t.getType() == Lexer.tNEWLINE) {
|
||||||
|
t= lexer.nextToken();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (t.getType() != IToken.tLPAREN) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fExpandingMacro= true;
|
|
||||||
final boolean contentAssist = fContentAssistLimit>=0 && fCurrentContext == fRootContext;
|
final boolean contentAssist = fContentAssistLimit>=0 && fCurrentContext == fRootContext;
|
||||||
TokenList replacement= new TokenList();
|
TokenList replacement= fMacroExpander.expand(lexer, stopAtNewline, macro, identifier, contentAssist);
|
||||||
final int endOffset= fMacroExpander.expand(macro, identifier, contentAssist, replacement);
|
final IASTName[] expansions= fMacroExpander.clearImplicitExpansions();
|
||||||
fExpandingMacro= false;
|
final ImageLocationInfo[] ili= fMacroExpander.clearImageLocationInfos();
|
||||||
|
final Token last= replacement.last();
|
||||||
final ImageLocationInfo[] ili= fMacroExpander.createImageLocations(replacement);
|
final int length= last == null ? 0 : last.getEndOffset();
|
||||||
final IASTName[] expansions= fMacroExpander.createImplicitExpansions();
|
|
||||||
final int length= fMacroExpander.adjustOffsets(replacement);
|
|
||||||
ILocationCtx ctx= fLocationMap.pushMacroExpansion(
|
ILocationCtx ctx= fLocationMap.pushMacroExpansion(
|
||||||
identifier.getOffset(), identifier.getEndOffset(), endOffset, length, macro, expansions, ili);
|
identifier.getOffset(), identifier.getEndOffset(), lexer.getLastEndOffset(), length, macro, expansions, ili);
|
||||||
fCurrentContext= new ScannerContextMacroExpansion(ctx, fCurrentContext, replacement);
|
fCurrentContext= new ScannerContext(ctx, fCurrentContext, replacement);
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1355,7 +1392,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// stuff to be removed
|
// old scanner, remove this.
|
||||||
public CharArrayObjectMap getRealDefinitions() {
|
public CharArrayObjectMap getRealDefinitions() {
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
}
|
}
|
||||||
|
@ -1366,7 +1403,6 @@ public class CPreprocessor implements ILexerLog, IScanner {
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
}
|
}
|
||||||
public void setScanComments(boolean val) {
|
public void setScanComments(boolean val) {
|
||||||
throw new UnsupportedOperationException();
|
|
||||||
}
|
}
|
||||||
public char[] getMainFilename() {
|
public char[] getMainFilename() {
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
|
|
|
@ -272,7 +272,7 @@ class ExpressionEvaluator {
|
||||||
private void consume() {
|
private void consume() {
|
||||||
fTokens= (Token) fTokens.getNext();
|
fTokens= (Token) fTokens.getNext();
|
||||||
if (fTokens == null) {
|
if (fTokens == null) {
|
||||||
fTokens= new Token(Lexer.tEND_OF_INPUT, null, 0, 0);
|
fTokens= new Token(IToken.tEND_OF_INPUT, null, 0, 0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -13,6 +13,7 @@ package org.eclipse.cdt.internal.core.parser.scanner;
|
||||||
|
|
||||||
import org.eclipse.cdt.core.dom.ast.IASTComment;
|
import org.eclipse.cdt.core.dom.ast.IASTComment;
|
||||||
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||||
|
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||||
import org.eclipse.cdt.core.dom.ast.IASTName;
|
import org.eclipse.cdt.core.dom.ast.IASTName;
|
||||||
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
import org.eclipse.cdt.core.dom.ast.IASTNode;
|
||||||
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
|
||||||
|
@ -74,7 +75,7 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @see IASTTranslationUnit#getContainingFilename()
|
* @see IASTTranslationUnit#getContainingFilename()
|
||||||
* mstodo- scanner removal should be renamed
|
* mstodo- old location resolver, should be renamed
|
||||||
*/
|
*/
|
||||||
String getContainingFilename(int sequenceNumber);
|
String getContainingFilename(int sequenceNumber);
|
||||||
|
|
||||||
|
@ -108,6 +109,11 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
|
||||||
*/
|
*/
|
||||||
IASTNodeLocation[] getLocations(int sequenceNumber, int length);
|
IASTNodeLocation[] getLocations(int sequenceNumber, int length);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see IASTName#getImageLocation()
|
||||||
|
*/
|
||||||
|
IASTImageLocation getImageLocation(int offset, int length);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the sequence-number for the given file-path and offset, or <code>-1</code> if this file
|
* Returns the sequence-number for the given file-path and offset, or <code>-1</code> if this file
|
||||||
* is not part of the translation-unit.
|
* is not part of the translation-unit.
|
||||||
|
@ -122,7 +128,9 @@ public interface ILocationResolver extends org.eclipse.cdt.internal.core.parser.
|
||||||
char[] getUnpreprocessedSignature(IASTFileLocation loc);
|
char[] getUnpreprocessedSignature(IASTFileLocation loc);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns a preprocessor node surrounding the given range, or <code>null</code>.
|
* Returns a preprocessor node surrounding the given range, or <code>null</code>. The result is either a
|
||||||
|
* preprocessing directive ({@link IASTPreprocessorStatement}) or a name contained therein {@link IASTName} or
|
||||||
|
* a macro expansion ({@link IASTName}).
|
||||||
*/
|
*/
|
||||||
IASTNode findSurroundingPreprocessorNode(int sequenceNumber, int length);
|
IASTNode findSurroundingPreprocessorNode(int sequenceNumber, int length);
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,14 +10,82 @@
|
||||||
*******************************************************************************/
|
*******************************************************************************/
|
||||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
package org.eclipse.cdt.internal.core.parser.scanner;
|
||||||
|
|
||||||
|
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
|
||||||
|
import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
|
||||||
|
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Information needed for computing image-locations. An image location exists for a name and describes where the name
|
* Information needed for computing image-locations. An image location exists for a name and describes where the name
|
||||||
* came from. This can be: source code, macro-expansion, parameter to macro-expansion or synthetic.
|
* came from. This can be: source code, macro-expansion, parameter to macro-expansion or synthetic.
|
||||||
*
|
*
|
||||||
* @since 5.0
|
* @since 5.0
|
||||||
*/
|
*/
|
||||||
public class ImageLocationInfo {
|
public abstract class ImageLocationInfo {
|
||||||
|
|
||||||
public static final ImageLocationInfo[] NO_LOCATION_INFOS= {};
|
public static final ImageLocationInfo[] NO_LOCATION_INFOS= {};
|
||||||
|
|
||||||
|
int fTokenOffsetInExpansion= -1;
|
||||||
|
|
||||||
|
public abstract IASTImageLocation createLocation(LocationMap lm, ImageLocationInfo upto);
|
||||||
|
public abstract boolean canConcatenate(ImageLocationInfo info);
|
||||||
|
|
||||||
|
public static class MacroImageLocationInfo extends ImageLocationInfo {
|
||||||
|
private final ObjectStyleMacro fMacro;
|
||||||
|
private final int fOffset;
|
||||||
|
private final int fEndOffset;
|
||||||
|
public MacroImageLocationInfo(ObjectStyleMacro macro, int offset, int endOffset) {
|
||||||
|
fMacro= macro;
|
||||||
|
fOffset= offset;
|
||||||
|
fEndOffset= endOffset;
|
||||||
|
}
|
||||||
|
|
||||||
|
public IASTImageLocation createLocation(LocationMap lm, ImageLocationInfo upto) {
|
||||||
|
IASTPreprocessorMacroDefinition md= lm.getMacroDefinition(fMacro);
|
||||||
|
IASTFileLocation expansionLoc= md.getExpansionLocation();
|
||||||
|
if (expansionLoc != null) {
|
||||||
|
final int length= ((MacroImageLocationInfo) upto).fEndOffset - fOffset;
|
||||||
|
return new ASTImageLocation(IASTImageLocation.MACRO_DEFINITION,
|
||||||
|
expansionLoc.getFileName(), expansionLoc.getNodeOffset() + fOffset, length);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean canConcatenate(ImageLocationInfo info) {
|
||||||
|
if (info instanceof MacroImageLocationInfo) {
|
||||||
|
MacroImageLocationInfo mli= (MacroImageLocationInfo) info;
|
||||||
|
if (mli.fMacro == fMacro && fEndOffset <= mli.fOffset) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class ParameterImageLocationInfo extends ImageLocationInfo {
|
||||||
|
public int fSequenceNumber;
|
||||||
|
public int fSequenceEndNumber;
|
||||||
|
public ParameterImageLocationInfo(int sequenceNumber, int sequenceEndNumber) {
|
||||||
|
fSequenceNumber= sequenceNumber;
|
||||||
|
fSequenceEndNumber= sequenceEndNumber;
|
||||||
|
}
|
||||||
|
public IASTImageLocation createLocation(LocationMap lm, ImageLocationInfo upto) {
|
||||||
|
int sequenceEnd= ((ParameterImageLocationInfo) upto).fSequenceEndNumber;
|
||||||
|
IASTFileLocation loc= lm.getMappedFileLocation(fSequenceNumber, sequenceEnd-fSequenceNumber);
|
||||||
|
if (loc != null) {
|
||||||
|
return new ASTImageLocation(IASTImageLocation.ARGUMENT_TO_MACRO_EXPANSION,
|
||||||
|
loc.getFileName(), loc.getNodeOffset(), loc.getNodeLength());
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean canConcatenate(ImageLocationInfo info) {
|
||||||
|
if (info instanceof ParameterImageLocationInfo) {
|
||||||
|
ParameterImageLocationInfo pli= (ParameterImageLocationInfo) info;
|
||||||
|
if (fSequenceEndNumber <= pli.fSequenceNumber) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -22,7 +22,7 @@ import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
  * Returns preprocessor tokens.
  * <p>
  * In addition to the preprocessor tokens the following tokens may also be returned:
- * {@link #tBEFORE_INPUT}, {@link #tEND_OF_INPUT}, {@link IToken#tCOMPLETION}.
+ * {@link #tBEFORE_INPUT}, {@link IToken#tEND_OF_INPUT}, {@link IToken#tCOMPLETION}.
  * <p>
  * Number literals are split up into {@link IToken#tINTEGER} and {@link IToken#tFLOATINGPT}.
  * No checks are done on the number literals.
@@ -38,10 +38,9 @@ import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
 final public class Lexer {
 	public static final int tBEFORE_INPUT = IToken.FIRST_RESERVED_SCANNER;
 	public static final int tNEWLINE = IToken.FIRST_RESERVED_SCANNER + 1;
-	public static final int tEND_OF_INPUT = IToken.FIRST_RESERVED_SCANNER + 2;
-	public static final int tQUOTE_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 3;
-	public static final int tSYSTEM_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 4;
-	public static final int tOTHER_CHARACTER = IToken.FIRST_RESERVED_SCANNER + 5;
+	public static final int tQUOTE_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 2;
+	public static final int tSYSTEM_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 3;
+	public static final int tOTHER_CHARACTER = IToken.FIRST_RESERVED_SCANNER + 4;
 
 	private static final int END_OF_INPUT = -1;
 	private static final int ORIGIN_LEXER = OffsetLimitReachedException.ORIGIN_LEXER;
@@ -49,7 +48,7 @@ final public class Lexer {
 	public final static class LexerOptions implements Cloneable {
 		public boolean fSupportDollarInitializers= true;
 		public boolean fSupportMinAndMax= true;
-		public boolean fSupportContentAssist= false;
+		public boolean fCreateImageLocations= true;
 
 		public Object clone() {
 			try {
@@ -62,6 +61,7 @@ final public class Lexer {
 
 	// configuration
 	private final LexerOptions fOptions;
+	private boolean fSupportContentAssist= false;
 	private final ILexerLog fLog;
 	private final Object fSource;
 
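With tEND_OF_INPUT moved to IToken, the remaining Lexer-private token constants shift down by one; they stay in a range reserved for the scanner so they cannot collide with the public token types. A small illustration of the numbering scheme; the base value used below is hypothetical:

```java
// Illustration of the renumbering above; FIRST_RESERVED_SCANNER is an assumed value.
class ScannerTokenConstantsSketch {
	static final int FIRST_RESERVED_SCANNER = -100;      // hypothetical base value
	static final int tBEFORE_INPUT        = FIRST_RESERVED_SCANNER;
	static final int tNEWLINE             = FIRST_RESERVED_SCANNER + 1;
	static final int tQUOTE_HEADER_NAME   = FIRST_RESERVED_SCANNER + 2;
	static final int tSYSTEM_HEADER_NAME  = FIRST_RESERVED_SCANNER + 3;
	static final int tOTHER_CHARACTER     = FIRST_RESERVED_SCANNER + 4;
}
```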
|
@ -77,12 +77,12 @@ final public class Lexer {
|
||||||
|
|
||||||
private boolean fInsideIncludeDirective= false;
|
private boolean fInsideIncludeDirective= false;
|
||||||
private Token fToken;
|
private Token fToken;
|
||||||
|
private Token fLastToken;
|
||||||
|
|
||||||
// for the few cases where we have to lookahead more than one character
|
// for the few cases where we have to lookahead more than one character
|
||||||
private int fMarkOffset;
|
private int fMarkOffset;
|
||||||
private int fMarkEndOffset;
|
private int fMarkEndOffset;
|
||||||
private int fMarkPrefetchedChar;
|
private int fMarkPrefetchedChar;
|
||||||
private boolean fFirstTokenAfterNewline= true;
|
|
||||||
|
|
||||||
|
|
||||||
public Lexer(char[] input, LexerOptions options, ILexerLog log, Object source) {
|
public Lexer(char[] input, LexerOptions options, ILexerLog log, Object source) {
|
||||||
|
@ -96,7 +96,7 @@ final public class Lexer {
|
||||||
fOptions= options;
|
fOptions= options;
|
||||||
fLog= log;
|
fLog= log;
|
||||||
fSource= source;
|
fSource= source;
|
||||||
fToken= new Token(tBEFORE_INPUT, source, start, start);
|
fLastToken= fToken= new Token(tBEFORE_INPUT, source, start, start);
|
||||||
nextCharPhase3();
|
nextCharPhase3();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -111,8 +111,8 @@ final public class Lexer {
|
||||||
* Resets the lexer to the first char and prepares for content-assist mode.
|
* Resets the lexer to the first char and prepares for content-assist mode.
|
||||||
*/
|
*/
|
||||||
public void setContentAssistMode(int offset) {
|
public void setContentAssistMode(int offset) {
|
||||||
fOptions.fSupportContentAssist= true;
|
fSupportContentAssist= true;
|
||||||
fLimit= Math.min(fLimit, fInput.length);
|
fLimit= Math.min(offset, fInput.length);
|
||||||
// re-initialize
|
// re-initialize
|
||||||
fOffset= fEndOffset= fStart;
|
fOffset= fEndOffset= fStart;
|
||||||
nextCharPhase3();
|
nextCharPhase3();
|
||||||
|
@ -133,24 +133,31 @@ final public class Lexer {
|
||||||
return fToken;
|
return fToken;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the endoffset of the token before the current one.
|
||||||
|
*/
|
||||||
|
public int getLastEndOffset() {
|
||||||
|
return fLastToken.getEndOffset();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Advances to the next token, skipping whitespace other than newline.
|
* Advances to the next token, skipping whitespace other than newline.
|
||||||
* @throws OffsetLimitReachedException when completion is requested in a literal or a header-name.
|
* @throws OffsetLimitReachedException when completion is requested in a literal or a header-name.
|
||||||
*/
|
*/
|
||||||
public Token nextToken() throws OffsetLimitReachedException {
|
public Token nextToken() throws OffsetLimitReachedException {
|
||||||
final int t= fToken.getType();
|
fLastToken= fToken;
|
||||||
fFirstTokenAfterNewline= t == tNEWLINE || t == tBEFORE_INPUT;
|
|
||||||
return fToken= fetchToken();
|
return fToken= fetchToken();
|
||||||
}
|
}
|
||||||
|
|
||||||
public boolean currentTokenIsFirstOnLine() {
|
public boolean currentTokenIsFirstOnLine() {
|
||||||
return fFirstTokenAfterNewline;
|
final int type= fLastToken.getType();
|
||||||
|
return type == tNEWLINE || type == tBEFORE_INPUT;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Advances to the next newline.
|
* Advances to the next newline or the end of input. The newline will not be consumed. If the
|
||||||
* @return the end offset of the last token before the newline or the start of the newline
|
* current token is a newline no action is performed.
|
||||||
* if there were no other tokens.
|
* Returns the end offset of the last token before the newline.
|
||||||
* @param origin parameter for the {@link OffsetLimitReachedException} when it has to be thrown.
|
* @param origin parameter for the {@link OffsetLimitReachedException} when it has to be thrown.
|
||||||
* @since 5.0
|
* @since 5.0
|
||||||
*/
|
*/
|
||||||
|
@ -160,17 +167,22 @@ final public class Lexer {
|
||||||
while(true) {
|
while(true) {
|
||||||
switch(t.getType()) {
|
switch(t.getType()) {
|
||||||
case IToken.tCOMPLETION:
|
case IToken.tCOMPLETION:
|
||||||
|
if (lt != null) {
|
||||||
|
fLastToken= lt;
|
||||||
|
}
|
||||||
fToken= t;
|
fToken= t;
|
||||||
throw new OffsetLimitReachedException(origin, t);
|
throw new OffsetLimitReachedException(origin, t);
|
||||||
case Lexer.tEND_OF_INPUT:
|
case IToken.tEND_OF_INPUT:
|
||||||
fToken= t;
|
if (fSupportContentAssist) {
|
||||||
if (fOptions.fSupportContentAssist) {
|
throw new OffsetLimitReachedException(origin, t);
|
||||||
throw new OffsetLimitReachedException(origin, lt);
|
|
||||||
}
|
}
|
||||||
return lt != null ? lt.getEndOffset() : t.getOffset();
|
// no break;
|
||||||
case Lexer.tNEWLINE:
|
case Lexer.tNEWLINE:
|
||||||
fToken= t;
|
fToken= t;
|
||||||
return lt != null ? lt.getEndOffset() : t.getOffset();
|
if (lt != null) {
|
||||||
|
fLastToken= lt;
|
||||||
|
}
|
||||||
|
return getLastEndOffset();
|
||||||
}
|
}
|
||||||
lt= t;
|
lt= t;
|
||||||
t= fetchToken();
|
t= fetchToken();
|
||||||
|
@ -219,7 +231,7 @@ final public class Lexer {
|
||||||
|
|
||||||
switch(c) {
|
switch(c) {
|
||||||
case END_OF_INPUT:
|
case END_OF_INPUT:
|
||||||
fToken= newToken(Lexer.tEND_OF_INPUT, start);
|
fLastToken= fToken= newToken(IToken.tEND_OF_INPUT, start);
|
||||||
return fToken;
|
return fToken;
|
||||||
case '\n':
|
case '\n':
|
||||||
haveNL= true;
|
haveNL= true;
|
||||||
|
@ -266,7 +278,7 @@ final public class Lexer {
|
||||||
}
|
}
|
||||||
restorePhase3();
|
restorePhase3();
|
||||||
}
|
}
|
||||||
fFirstTokenAfterNewline= true;
|
fLastToken= new Token(tNEWLINE, fSource, 0, start); // offset not significant
|
||||||
fToken= newDigraphToken(IToken.tPOUND, start);
|
fToken= newDigraphToken(IToken.tPOUND, start);
|
||||||
return fToken;
|
return fToken;
|
||||||
}
|
}
|
||||||
|
@ -275,7 +287,7 @@ final public class Lexer {
|
||||||
|
|
||||||
case '#':
|
case '#':
|
||||||
if (hadNL && d != '#') {
|
if (hadNL && d != '#') {
|
||||||
fFirstTokenAfterNewline= true;
|
fLastToken= new Token(tNEWLINE, fSource, 0, start); // offset not significant
|
||||||
fToken= newToken(IToken.tPOUND, start);
|
fToken= newToken(IToken.tPOUND, start);
|
||||||
return fToken;
|
return fToken;
|
||||||
}
|
}
|
||||||
|
@ -297,7 +309,7 @@ final public class Lexer {
|
||||||
final int d= nextCharPhase3();
|
final int d= nextCharPhase3();
|
||||||
switch(c) {
|
switch(c) {
|
||||||
case END_OF_INPUT:
|
case END_OF_INPUT:
|
||||||
return newToken(Lexer.tEND_OF_INPUT, start);
|
return newToken(IToken.tEND_OF_INPUT, start);
|
||||||
case '\n':
|
case '\n':
|
||||||
fInsideIncludeDirective= false;
|
fInsideIncludeDirective= false;
|
||||||
return newToken(Lexer.tNEWLINE, start);
|
return newToken(Lexer.tNEWLINE, start);
|
||||||
|
@ -633,7 +645,7 @@ final public class Lexer {
|
||||||
loop: while (!done) {
|
loop: while (!done) {
|
||||||
switch (c) {
|
switch (c) {
|
||||||
case END_OF_INPUT:
|
case END_OF_INPUT:
|
||||||
if (fOptions.fSupportContentAssist) {
|
if (fSupportContentAssist) {
|
||||||
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
||||||
newToken((expectQuotes ? tQUOTE_HEADER_NAME : tSYSTEM_HEADER_NAME), start, length));
|
newToken((expectQuotes ? tQUOTE_HEADER_NAME : tSYSTEM_HEADER_NAME), start, length));
|
||||||
}
|
}
|
||||||
|
@ -695,7 +707,7 @@ final public class Lexer {
|
||||||
loop: while (!done) {
|
loop: while (!done) {
|
||||||
switch(c) {
|
switch(c) {
|
||||||
case END_OF_INPUT:
|
case END_OF_INPUT:
|
||||||
if (fOptions.fSupportContentAssist) {
|
if (fSupportContentAssist) {
|
||||||
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLSTRING : IToken.tSTRING, start, length));
|
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLSTRING : IToken.tSTRING, start, length));
|
||||||
}
|
}
|
||||||
// no break;
|
// no break;
|
||||||
|
@ -731,7 +743,7 @@ final public class Lexer {
|
||||||
loop: while (!done) {
|
loop: while (!done) {
|
||||||
switch(c) {
|
switch(c) {
|
||||||
case END_OF_INPUT:
|
case END_OF_INPUT:
|
||||||
if (fOptions.fSupportContentAssist) {
|
if (fSupportContentAssist) {
|
||||||
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLCHAR : IToken.tCHAR, start, length));
|
throw new OffsetLimitReachedException(ORIGIN_LEXER, newToken(wide ? IToken.tLCHAR : IToken.tCHAR, start, length));
|
||||||
}
|
}
|
||||||
// no break;
|
// no break;
|
||||||
|
@ -788,7 +800,7 @@ final public class Lexer {
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case END_OF_INPUT:
|
case END_OF_INPUT:
|
||||||
if (fOptions.fSupportContentAssist) {
|
if (fSupportContentAssist) {
|
||||||
tokenKind= IToken.tCOMPLETION;
|
tokenKind= IToken.tCOMPLETION;
|
||||||
}
|
}
|
||||||
isPartOfIdentifier= false;
|
isPartOfIdentifier= false;
|
||||||
|
@ -878,8 +890,8 @@ final public class Lexer {
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case tEND_OF_INPUT:
|
case END_OF_INPUT:
|
||||||
if (fOptions.fSupportContentAssist) {
|
if (fSupportContentAssist) {
|
||||||
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
throw new OffsetLimitReachedException(ORIGIN_LEXER,
|
||||||
newToken((isFloat ? IToken.tFLOATINGPT : IToken.tINTEGER), start, length));
|
newToken((isFloat ? IToken.tFLOATINGPT : IToken.tINTEGER), start, length));
|
||||||
}
|
}
|
||||||
|
|
|
@ -90,6 +90,13 @@ abstract class LocationCtx implements ILocationCtx {
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the macro-expansion surrounding or augmenting the given range, or <code>null</code>.
|
||||||
|
*/
|
||||||
|
public LocationCtxMacroExpansion findSurroundingMacroExpansion(int sequenceNumber, int length) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the minimal file location containing the specified sequence number range, assuming
|
* Returns the minimal file location containing the specified sequence number range, assuming
|
||||||
* that it is contained in this context.
|
* that it is contained in this context.
|
||||||
|
|
|
@ -83,17 +83,26 @@ class LocationCtxContainer extends LocationCtx {
|
||||||
|
|
||||||
public final LocationCtx findSurroundingContext(int sequenceNumber, int length) {
|
public final LocationCtx findSurroundingContext(int sequenceNumber, int length) {
|
||||||
int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
|
int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
|
||||||
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber);
|
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber, false);
|
||||||
if (child != null && child.fSequenceNumber+child.getSequenceLength() > testEnd) {
|
if (child != null && child.fSequenceNumber+child.getSequenceLength() > testEnd) {
|
||||||
return child.findSurroundingContext(sequenceNumber, length);
|
return child.findSurroundingContext(sequenceNumber, length);
|
||||||
}
|
}
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public final LocationCtxMacroExpansion findSurroundingMacroExpansion(int sequenceNumber, int length) {
|
||||||
|
int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
|
||||||
|
final LocationCtx child= findChildLessOrEqualThan(sequenceNumber, true);
|
||||||
|
if (child != null && child.fSequenceNumber+child.getSequenceLength() > testEnd) {
|
||||||
|
 return child.findSurroundingMacroExpansion(sequenceNumber, length);
 }
 return null;
 }

 public IASTFileLocation findMappedFileLocation(int sequenceNumber, int length) {
 // try to delegate to a child.
 int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
-final LocationCtx child= findChildLessOrEqualThan(sequenceNumber);
+final LocationCtx child= findChildLessOrEqualThan(sequenceNumber, false);
 if (child != null && child.fSequenceNumber+child.getSequenceLength() > testEnd) {
 return child.findMappedFileLocation(sequenceNumber, length);
 }

@@ -103,7 +112,7 @@ class LocationCtxContainer extends LocationCtx {
 public boolean collectLocations(int sequenceNumber, final int length, ArrayList locations) {
 final int endSequenceNumber= sequenceNumber+length;
 if (fChildren != null) {
-int childIdx= Math.max(0, findChildIdxLessOrEqualThan(sequenceNumber));
+int childIdx= Math.max(0, findChildIdxLessOrEqualThan(sequenceNumber, false));
 for (; childIdx < fChildren.size(); childIdx++) {
 final LocationCtx child= (LocationCtx) fChildren.get(childIdx);

@@ -154,7 +163,7 @@ class LocationCtxContainer extends LocationCtx {
 return null;
 }

-final int findChildIdxLessOrEqualThan(int sequenceNumber) {
+final int findChildIdxLessOrEqualThan(int sequenceNumber, boolean beforeReplacedChars) {
 if (fChildren == null) {
 return -1;
 }

@@ -163,7 +172,11 @@ class LocationCtxContainer extends LocationCtx {
 while (upper > lower) {
 int middle= (upper+lower)/2;
 LocationCtx child= (LocationCtx) fChildren.get(middle);
-if (child.fSequenceNumber <= sequenceNumber) {
+int childSequenceNumber= child.fSequenceNumber;
+if (beforeReplacedChars) {
+childSequenceNumber-= child.fEndOffsetInParent-child.fOffsetInParent;
+}
+if (childSequenceNumber <= sequenceNumber) {
 lower= middle+1;
 }
 else {

@@ -173,8 +186,8 @@ class LocationCtxContainer extends LocationCtx {
 return lower-1;
 }

-final LocationCtx findChildLessOrEqualThan(final int sequenceNumber) {
-final int idx= findChildIdxLessOrEqualThan(sequenceNumber);
+final LocationCtx findChildLessOrEqualThan(final int sequenceNumber, boolean beforeReplacedChars) {
+final int idx= findChildIdxLessOrEqualThan(sequenceNumber, beforeReplacedChars);
 return idx >= 0 ? (LocationCtx) fChildren.get(idx) : null;
 }


@@ -41,8 +41,8 @@ class LocationCtxFile extends LocationCtxContainer {
 // try to delegate to a child.
 final int testEnd= length > 1 ? sequenceNumber+length-1 : sequenceNumber;
 final int sequenceEnd= sequenceNumber+length;
-final LocationCtx child1= findChildLessOrEqualThan(sequenceNumber);
-final LocationCtx child2= testEnd == sequenceNumber ? child1 : findChildLessOrEqualThan(testEnd);
+final LocationCtx child1= findChildLessOrEqualThan(sequenceNumber, false);
+final LocationCtx child2= testEnd == sequenceNumber ? child1 : findChildLessOrEqualThan(testEnd, false);

 if (child1 == child2 && child1 != null && child1.fSequenceNumber + child1.getSequenceLength() > testEnd) {
 return child1.findMappedFileLocation(sequenceNumber, length);

@@ -12,6 +12,7 @@ package org.eclipse.cdt.internal.core.parser.scanner;

 import java.util.ArrayList;

+import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorMacroDefinition;
 import org.eclipse.cdt.core.dom.ast.IMacroBinding;

@@ -22,7 +23,8 @@ import org.eclipse.cdt.core.dom.ast.IMacroBinding;
 class LocationCtxMacroExpansion extends LocationCtx {
 private final LocationMap fLocationMap;
 private final int fLength;
-private ASTMacroReferenceName fName;
+private final ASTMacroReferenceName fName;
+private final ImageLocationInfo[] fLocationInfos;

 public LocationCtxMacroExpansion(LocationMap map, LocationCtxContainer parent, int parentOffset, int parentEndOffset,
 int sequenceNumber, int length, ImageLocationInfo[] imageLocations, ASTMacroReferenceName expansion) {
@@ -30,6 +32,7 @@ class LocationCtxMacroExpansion extends LocationCtx {
 fLocationMap= map;
 fLength= length;
 fName= expansion;
+fLocationInfos= imageLocations;
 }

 public int getSequenceLength() {
@@ -49,9 +52,48 @@ class LocationCtxMacroExpansion extends LocationCtx {
 return false;
 }

+public ASTMacroReferenceName getMacroReference() {
+return fName;
+}
+
 public IASTPreprocessorMacroDefinition getMacroDefinition() {
 return fLocationMap.getMacroDefinition((IMacroBinding) fName.getBinding());
 }

+public LocationCtxMacroExpansion findSurroundingMacroExpansion(int sequenceNumber, int length) {
+return this;
+}
+
+public IASTImageLocation getImageLocation(int offset, int length) {
+if (length == 0) {
+return null;
+}
+final int end= offset+length;
+int nextToCheck= offset;
+ImageLocationInfo firstInfo= null;
+ImageLocationInfo lastInfo= null;
+for (int i = 0; i < fLocationInfos.length; i++) {
+ImageLocationInfo info = fLocationInfos[i];
+if (info.fTokenOffsetInExpansion == nextToCheck) {
+if (lastInfo == null) {
+firstInfo= lastInfo= info;
+}
+else if (lastInfo.canConcatenate(info)) {
+lastInfo= info;
+}
+else {
+return null;
+}
+if (++nextToCheck == end) {
+return firstInfo.createLocation(fLocationMap, lastInfo);
+}
+}
+else if (info.fTokenOffsetInExpansion > nextToCheck) {
+return null;
+}
+}
+return null;
+}
 }

@@ -18,6 +18,7 @@ import java.util.List;

 import org.eclipse.cdt.core.dom.ast.IASTComment;
 import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
+import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
 import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTNode;
 import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
@@ -120,7 +121,7 @@ public class LocationMap implements ILocationResolver {
 int nameEndNumber= getSequenceNumberForOffset(nameEndOffset);
 int endNumber= getSequenceNumberForOffset(endOffset);
 final ASTInclusionStatement inclusionStatement=
-new ASTInclusionStatement(fTranslationUnit, startNumber, nameNumber, nameEndNumber, name, filename, userInclude, true);
+new ASTInclusionStatement(fTranslationUnit, startNumber, nameNumber, nameEndNumber, endNumber, name, filename, userInclude, true);
 fDirectives.add(inclusionStatement);
 fCurrentContext= new LocationCtxFile((LocationCtxContainer) fCurrentContext, filename, buffer, startOffset, endOffset, endNumber, inclusionStatement);
 fLastChildInsertionOffset= 0;
@@ -208,8 +209,8 @@ public class LocationMap implements ILocationResolver {
 startOffset= getSequenceNumberForOffset(startOffset);
 nameOffset= getSequenceNumberForOffset(nameOffset);
 nameEndOffset= getSequenceNumberForOffset(nameEndOffset);
-// not using endOffset, compatible with 4.0: endOffset= getSequenceNumberForOffset(endOffset);
-fDirectives.add(new ASTInclusionStatement(fTranslationUnit, startOffset, nameOffset, nameEndOffset, name, filename, userInclude, active));
+endOffset= getSequenceNumberForOffset(endOffset);
+fDirectives.add(new ASTInclusionStatement(fTranslationUnit, startOffset, nameOffset, nameEndOffset, endOffset, name, filename, userInclude, active));
 }

 public void encounteredComment(int offset, int endOffset, boolean isBlockComment) {
@@ -366,6 +367,25 @@ public class LocationMap implements ILocationResolver {
 return (IASTNodeLocation[]) result.toArray(new IASTNodeLocation[result.size()]);
 }

+public IASTImageLocation getImageLocation(int sequenceNumber, int length) {
+ArrayList result= new ArrayList();
+fRootContext.collectLocations(sequenceNumber, length, result);
+if (result.size() != 1) {
+return null;
+}
+IASTNodeLocation loc= (IASTNodeLocation) result.get(0);
+if (loc instanceof IASTFileLocation) {
+IASTFileLocation floc= (IASTFileLocation) loc;
+return new ASTImageLocation(IASTImageLocation.REGULAR_CODE,
+floc.getFileName(), floc.getNodeOffset(), floc.getNodeLength());
+}
+if (loc instanceof ASTMacroExpansionLocation) {
+ASTMacroExpansionLocation mel= (ASTMacroExpansionLocation) loc;
+return mel.getImageLocation();
+}
+return null;
+}
+
 public IASTNode findSurroundingPreprocessorNode(int sequenceNumber, int length) {
 int lower=0;
 int upper= fDirectives.size()-1;
@@ -386,6 +406,17 @@ public class LocationMap implements ILocationResolver {
 upper= middle-1;
 }
 }
+// search for a macro-expansion
+LocationCtxMacroExpansion ctx= fRootContext.findSurroundingMacroExpansion(sequenceNumber, length);
+if (ctx != null) {
+ASTMacroReferenceName candidate= ctx.getMacroReference();
+final int candSequenceNumber = candidate.getOffset();
+final int candEndSequenceNumber = candSequenceNumber + candidate.getLength();
+if (candSequenceNumber <= sequenceNumber && sequenceNumber + length <= candEndSequenceNumber) {
+return candidate;
+}
+}
+
 return null;
 }

@@ -497,11 +528,12 @@ public class LocationMap implements ILocationResolver {
 public ASTPreprocessorSelectionResult getPreprocessorNode(String path, int offset, int length) {
 throw new UnsupportedOperationException();
 }
-// mstodo- locations
+
+// mstodo- old location resolver
 public char[] getUnpreprocessedSignature(IASTNodeLocation[] locations) {
 throw new UnsupportedOperationException();
 }
-// mstodo- scanner removal
+// mstodo- old location resolver
 public IASTName[] getMacroExpansions() {
 throw new UnsupportedOperationException();
 }

@@ -99,7 +99,7 @@ class MacroDefinitionParser {
 final char[] nameChars = name.getCharImage();
 final char[][] paramList= parseParamList(lexer, name);
 final Token replacementToken = lexer.currentToken();
-if (replacementToken.getType() != Lexer.tEND_OF_INPUT) {
+if (replacementToken.getType() != IToken.tEND_OF_INPUT) {
 throw new InvalidMacroDefinitionException(nameChars, replacementToken.getOffset(), replacementToken.getEndOffset());
 }

@@ -222,7 +222,7 @@ class MacroDefinitionParser {
 switch(candidate.getType()) {
 case IToken.tCOMPLETION:
 throw new OffsetLimitReachedException(ORIGIN_PREPROCESSOR_DIRECTIVE, candidate);
-case Lexer.tEND_OF_INPUT:
+case IToken.tEND_OF_INPUT:
 case Lexer.tNEWLINE:
 break loop;
 case IToken.tIDENTIFIER:

@@ -19,6 +19,8 @@ import org.eclipse.cdt.core.parser.IToken;
 import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
 import org.eclipse.cdt.core.parser.util.CharArrayObjectMap;
 import org.eclipse.cdt.core.parser.util.CharArrayUtils;
+import org.eclipse.cdt.internal.core.parser.scanner.ImageLocationInfo.MacroImageLocationInfo;
+import org.eclipse.cdt.internal.core.parser.scanner.ImageLocationInfo.ParameterImageLocationInfo;
 import org.eclipse.cdt.internal.core.parser.scanner.Lexer.LexerOptions;
 import org.eclipse.cdt.internal.core.parser.scanner.MacroDefinitionParser.TokenParameterReference;

@@ -28,6 +30,7 @@ import org.eclipse.cdt.internal.core.parser.scanner.MacroDefinitionParser.TokenP
 */
 public class MacroExpander {
 private static final int ORIGIN = OffsetLimitReachedException.ORIGIN_MACRO_EXPANSION;
+private static final Token END_TOKEN = new Token(IToken.tEND_OF_INPUT, null, 0, 0);

 /**
 * Marks the beginning and the end of the scope of a macro expansion. Necessary to properly
@@ -66,17 +69,25 @@ public class MacroExpander {
 * Combines a list of tokens with the preprocessor to form the input for macro expansion.
 */
 private class TokenSource extends TokenList {
-private boolean fUseCpp;
+private final Lexer fLexer;
+private final boolean fStopAtNewline;

-public TokenSource(boolean useCpp) {
-fUseCpp= useCpp;
+public TokenSource(Lexer lexer, boolean stopAtNewline) {
+fLexer= lexer;
+fStopAtNewline= stopAtNewline;
 }

 public Token fetchFirst() throws OffsetLimitReachedException {
 Token t= removeFirst();
-if (t == null && fUseCpp) {
-t= fCpp.fetchTokenFromPreprocessor();
+if (t == null && fLexer != null) {
+t= fLexer.currentToken();
+if (fStopAtNewline && t.getType() == Lexer.tNEWLINE) {
+t= END_TOKEN;
+}
+else {
 fEndOffset= t.getEndOffset();
+fLexer.nextToken();
+}
 }
 return t;
 }
@@ -98,36 +109,46 @@ public class MacroExpander {
 t= (Token) t.getNext();
 }

-if (fUseCpp) {
-return fCpp.findLParenthesisInContext();
+if (fLexer != null) {
+t= fLexer.currentToken();
+if (!fStopAtNewline) {
+while(t.getType() == Lexer.tNEWLINE) {
+t= fLexer.nextToken();
 }
+}
+return t.getType() == IToken.tLPAREN;
+}

 return false;
 }
 }

+private final ILexerLog fLog;
 private final MacroDefinitionParser fDefinitionParser;
 private final CharArrayObjectMap fDictionary;
 private final LocationMap fLocationMap;
-private final CPreprocessor fCpp;
 private final LexerOptions fLexOptions;
-private int fEndOffset;
 private ArrayList fImplicitMacroExpansions= new ArrayList();
+private ArrayList fImageLocationInfos= new ArrayList();
 private boolean fCompletionMode;
 private int fStartOffset;
+private int fEndOffset;

-public MacroExpander(CPreprocessor cpp, CharArrayObjectMap dict, LocationMap locationMap, MacroDefinitionParser mdp, LexerOptions lexOptions) {
-fCpp= cpp;
+public MacroExpander(ILexerLog log, CharArrayObjectMap dict, LocationMap locationMap, MacroDefinitionParser mdp, LexerOptions lexOptions) {
 fDictionary= dict;
 fLocationMap= locationMap;
 fDefinitionParser= mdp;
 fLexOptions= lexOptions;
+fLog= log;
 }

 /**
-* Expects that the identifier has been consumed, stores the result in the list provided and returns the
-* end offset of the last token read from the preprocessor input.
+* Expects that the identifier has been consumed, stores the result in the list provided.
 */
-public int expand(PreprocessorMacro macro, Token identifier, boolean completionMode, TokenList expansion) throws OffsetLimitReachedException {
+public TokenList expand(Lexer lexer, boolean stopAtNewline, PreprocessorMacro macro, Token identifier, boolean completionMode) throws OffsetLimitReachedException {
+fImplicitMacroExpansions.clear();
+fImageLocationInfos.clear();
+
 fStartOffset= identifier.getOffset();
 fEndOffset= identifier.getEndOffset();
 fCompletionMode= completionMode;
@@ -135,13 +156,15 @@ public class MacroExpander {
 IdentityHashMap forbidden= new IdentityHashMap();

 // setup input sequence
-TokenSource input= new TokenSource(true);
+TokenSource input= new TokenSource(lexer, stopAtNewline);
 TokenList firstExpansion= new TokenList();
 expandOne(identifier, macro, forbidden, input, firstExpansion);
 input.prepend(firstExpansion);

-expandAll(input, forbidden, expansion);
-return fEndOffset;
+TokenList result= expandAll(input, forbidden);
+postProcessTokens(result);
+
+return result;
 }

 /**
@@ -161,8 +184,7 @@ public class MacroExpander {
 for (int i = 0; i < argInputs.length; i++) {
 final TokenSource argInput = argInputs[i];
 clonedArgs[i]= argInput.cloneTokens();
-final TokenList expandedArg= new TokenList();
-expandAll(argInput, forbidden, expandedArg);
+final TokenList expandedArg= expandAll(argInput, forbidden);
 expandedArgs[i]= expandedArg;
 }
 replaceArgs(macro, clonedArgs, expandedArgs, result);
@@ -174,7 +196,8 @@ public class MacroExpander {
 return lastConsumed;
 }

-private void expandAll(TokenSource input, IdentityHashMap forbidden, TokenList result) throws OffsetLimitReachedException {
+private TokenList expandAll(TokenSource input, IdentityHashMap forbidden) throws OffsetLimitReachedException {
+final TokenList result= new TokenList();
 Token l= null;
 Token t= input.removeFirst();
 while(t != null) {
@@ -194,8 +217,11 @@ public class MacroExpander {
 result.append(t);
 }
 else {
-// mstodo- image location
-fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, null));
+ImageLocationInfo info= null;
+if (fLexOptions.fCreateImageLocations) {
+info = createImageLocationInfo(t);
+}
+fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, info));

 TokenList replacement= new TokenList();

@@ -213,6 +239,20 @@ public class MacroExpander {
 l= t;
 t= input.removeFirst();
 }
+return result;
+}
+
+private ImageLocationInfo createImageLocationInfo(Token t) {
+final Object s= t.fSource;
+if (s instanceof ObjectStyleMacro) {
+return new MacroImageLocationInfo((ObjectStyleMacro) s, fEndOffset, fEndOffset);
+}
+else if (s instanceof CPreprocessor) {
+int sequenceNumber= fLocationMap.getSequenceNumberForOffset(t.getOffset());
+int sequenceEndNumber= fLocationMap.getSequenceNumberForOffset(t.getEndOffset());
+return new ParameterImageLocationInfo(sequenceNumber, sequenceEndNumber);
+}
+return null;
 }

 private void addSpacemarker(Token l, Token t, TokenList target) {
@@ -242,7 +282,7 @@ public class MacroExpander {
 int idx= 0;
 int nesting= -1;
 for (int i = 0; i < result.length; i++) {
-result[i]= new TokenSource(false);
+result[i]= new TokenSource(null, false);
 }

 boolean complete= false;
@@ -256,7 +296,7 @@ public class MacroExpander {
 }
 lastToken= t;
 switch(t.getType()) {
-case Lexer.tEND_OF_INPUT:
+case IToken.tEND_OF_INPUT:
 assert nesting >= 0;
 if (fCompletionMode) {
 throw new OffsetLimitReachedException(ORIGIN, null);
@@ -266,7 +306,6 @@ public class MacroExpander {
 throw new OffsetLimitReachedException(ORIGIN, t);

 case Lexer.tNEWLINE:
-assert false; // we should not get any newlines from macros or the preprocessor.
 continue loop;

 case IToken.tLPAREN:
@@ -336,7 +375,7 @@ public class MacroExpander {
 }

 private void handleProblem(int problemID, char[] arg) {
-fCpp.handleProblem(problemID, arg, fStartOffset, fEndOffset);
+fLog.handleProblem(problemID, arg, fStartOffset, fEndOffset);
 }

 private void replaceArgs(PreprocessorMacro macro, TokenList[] args, TokenList[] expandedArgs, TokenList result) {
@@ -545,7 +584,7 @@ public class MacroExpander {
 try {
 Token t1= lex.nextToken();
 Token t2= lex.nextToken();
-if (t1.getType() != Lexer.tEND_OF_INPUT && t2.getType() == Lexer.tEND_OF_INPUT) {
+if (t1.getType() != IToken.tEND_OF_INPUT && t2.getType() == IToken.tEND_OF_INPUT) {
 t1.setOffset(arg1.getOffset(), arg2.getEndOffset());
 return t1;
 }
@@ -604,26 +643,44 @@ public class MacroExpander {
 }
 }

-public IASTName[] createImplicitExpansions() {
+public IASTName[] clearImplicitExpansions() {
 IASTName[] result= (IASTName[]) fImplicitMacroExpansions.toArray(new IASTName[fImplicitMacroExpansions.size()]);
 fImplicitMacroExpansions.clear();
 return result;
 }

-public ImageLocationInfo[] createImageLocations(TokenList replacement) {
-// mstodo- image locations
-return ImageLocationInfo.NO_LOCATION_INFOS;
+public ImageLocationInfo[] clearImageLocationInfos() {
+ImageLocationInfo[] result= (ImageLocationInfo[]) fImageLocationInfos.toArray(new ImageLocationInfo[fImageLocationInfos.size()]);
+fImageLocationInfos.clear();
+return result;
 }

-public int adjustOffsets(TokenList replacement) {
+private void postProcessTokens(TokenList replacement) {
+final boolean createImageLocations= fLexOptions.fCreateImageLocations;
 int offset= 0;
 Token l= null;
 for (Token t= replacement.first(); t!=null; t= (Token) t.getNext()) {
 switch(t.getType()) {
 case CPreprocessor.tEXPANDED_IDENTIFIER:
 t.setType(IToken.tIDENTIFIER);
+if (createImageLocations) {
+ImageLocationInfo info= createImageLocationInfo(t);
+if (info != null) {
+info.fTokenOffsetInExpansion= offset;
+fImageLocationInfos.add(info);
+}
+}
 break;
+case IToken.tIDENTIFIER:
+if (createImageLocations) {
+ImageLocationInfo info= createImageLocationInfo(t);
+if (info != null) {
+info.fTokenOffsetInExpansion= offset;
+fImageLocationInfos.add(info);
+}
+}
+break;
+
 case CPreprocessor.tSCOPE_MARKER:
 case CPreprocessor.tSPACE:
 case CPreprocessor.tNOSPACE:
@@ -633,6 +690,5 @@ public class MacroExpander {
 t.setOffset(offset, ++offset);
 l= t;
 }
-return offset;
 }
 }

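Note on the TokenSource change above: the expander now drains already-expanded tokens first and only then falls back to the lexer, optionally treating a newline as end-of-input. The following is a minimal standalone sketch of that fallback pattern; the types here are simplified stand-ins, not the CDT Token/Lexer classes.

// Illustrative sketch only: simplified stand-ins for the CDT token/lexer types.
import java.util.ArrayDeque;
import java.util.Deque;

class TokenSourceSketch {
    static final String END_OF_INPUT = "<eoi>";
    static final String NEWLINE = "<nl>";

    private final Deque<String> pending = new ArrayDeque<>(); // tokens pushed back by expansion
    private final Deque<String> lexer;                        // stand-in for the underlying Lexer
    private final boolean stopAtNewline;

    TokenSourceSketch(Deque<String> lexer, boolean stopAtNewline) {
        this.lexer = lexer;
        this.stopAtNewline = stopAtNewline;
    }

    void prepend(String token) {
        pending.addFirst(token);
    }

    // Prefer already-expanded tokens; fall back to the lexer, treating a newline
    // as end-of-input when expanding inside a preprocessor directive.
    String fetch() {
        if (!pending.isEmpty()) {
            return pending.removeFirst();
        }
        if (lexer == null || lexer.isEmpty()) {
            return END_OF_INPUT;
        }
        String t = lexer.peekFirst();
        if (stopAtNewline && NEWLINE.equals(t)) {
            return END_OF_INPUT;
        }
        return lexer.removeFirst();
    }
}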
@@ -217,7 +217,7 @@ class FunctionStyleMacro extends ObjectStyleMacro {
 char[][] result= new char[length][];
 System.arraycopy(fParamList, 0, result, 0, length-1);
 if (fHasVarArgs == VAARGS) {
-result[length-1] = Keywords.cVA_ARGS;
+result[length-1]= Keywords.cpELLIPSIS;
 }
 else {
 final char[] param= fParamList[length-1];


@@ -10,6 +9,
 *******************************************************************************/
 package org.eclipse.cdt.internal.core.parser.scanner;

+import java.util.ArrayList;
+
+import org.eclipse.cdt.core.parser.IToken;
 import org.eclipse.cdt.core.parser.OffsetLimitReachedException;


@@ -18,7 +21,9 @@ import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
 * Represents part of the input to the preprocessor. This may be a file or the result of a macro expansion.
 * @since 5.0
 */
-abstract class ScannerContext {
+final class ScannerContext {
+private static final Token END_TOKEN = new Token(IToken.tEND_OF_INPUT, null, 0, 0);
+
 public static final Integer BRANCH_IF = new Integer(0);
 public static final Integer BRANCH_ELIF = new Integer(1);
 public static final Integer BRANCH_ELSE = new Integer(2);
@@ -26,14 +31,26 @@ abstract class ScannerContext {

 private final ILocationCtx fLocationCtx;
 private final ScannerContext fParent;
+private final Lexer fLexer;
+private ArrayList fBranches= null;
+
+private Token fTokens;
+
 /**
 * @param ctx
 * @param parent context to be used after this context is done.
 */
-public ScannerContext(ILocationCtx ctx, ScannerContext parent) {
+public ScannerContext(ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
 fLocationCtx= ctx;
 fParent= parent;
+fLexer= lexer;
+}
+
+public ScannerContext(ILocationCtx ctx, ScannerContext parent, TokenList tokens) {
+fLocationCtx= ctx;
+fParent= parent;
+fLexer= null;
+fTokens= tokens.first();
 }

 /**
@@ -52,37 +69,70 @@ abstract class ScannerContext {
 }

 /**
-* Tests whether or not the current identifier of this context are subject to macro-expansion.
+* Returns the lexer for this context.
 */
-public boolean expandsMacros() {
-return true;
+public final Lexer getLexer() {
+return fLexer;
 }

-/**
-* Returns the lexer for a preprocessing directive or <code>null</code> if the current
-* token is not the start of a preprocessing directive.
-* <p>
-* The current token starts a directive, whenever the context supports directives,
-* and the current token is a pound that occurs as the first token on the line.
-*/
-public abstract Lexer getLexerForPPDirective();
-
 /**
 * Needs to be called whenever we change over to another branch of conditional
 * compilation. Returns whether the change is legal at this point or not.
 */
-public abstract boolean changeBranch(Integer state);
+public final boolean changeBranch(Integer branchKind) {
+if (fBranches == null) {
+fBranches= new ArrayList();
+}
+
+// an if starts a new conditional construct
+if (branchKind == BRANCH_IF) {
+fBranches.add(branchKind);
+return true;
+}
+// if we are not inside of an conditional there shouldn't be an #else, #elsif or #end
+final int pos= fBranches.size()-1;
+if (pos < 0) {
+return false;
+}
+// an #end just pops one construct.
+if (branchKind == BRANCH_END) {
+fBranches.remove(pos);
+return true;
+}
+// #elsif or #else cannot appear after another #else
+if (fBranches.get(pos) == BRANCH_ELSE) {
+return false;
+}
+// overwrite #if, #elsif with #elsif or #else
+fBranches.set(pos, branchKind);
+return true;
+}

 /**
 * Returns the current token from this context. When called before calling {@link #nextPPToken()}
 * a token of type {@link Lexer#tBEFORE_INPUT} will be returned.
 * @since 5.0
 */
-public abstract Token currentLexerToken();
+public final Token currentLexerToken() {
+if (fLexer != null) {
+return fLexer.currentToken();
+}
+if (fTokens != null) {
+return fTokens;
+}
+return END_TOKEN;
+}

 /**
 * Returns the next token from this context.
 */
-public abstract Token nextPPToken() throws OffsetLimitReachedException;
+public Token nextPPToken() throws OffsetLimitReachedException {
+if (fLexer != null) {
+return fLexer.nextToken();
+}
+if (fTokens != null) {
+fTokens= (Token) fTokens.getNext();
+}
+return currentLexerToken();
+}
 }

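Note on the merged ScannerContext above: changeBranch() now keeps the #if/#elif/#else/#endif bookkeeping that previously lived in ScannerContextFile. A minimal standalone sketch of that bookkeeping follows; the class and enum names are hypothetical, not CDT API.

// Illustrative sketch only: mirrors the conditional-branch rules shown above,
// using an enum instead of the Integer constants in ScannerContext.
import java.util.ArrayDeque;
import java.util.Deque;

class BranchTrackerSketch {
    enum Branch { IF, ELIF, ELSE, END }

    private final Deque<Branch> branches = new ArrayDeque<>();

    boolean changeBranch(Branch kind) {
        if (kind == Branch.IF) {          // #if opens a new conditional construct
            branches.push(kind);
            return true;
        }
        if (branches.isEmpty()) {         // #elif/#else/#endif outside any #if is illegal
            return false;
        }
        if (kind == Branch.END) {         // #endif closes the innermost construct
            branches.pop();
            return true;
        }
        if (branches.peek() == Branch.ELSE) {
            return false;                 // nothing may follow #else within the same construct
        }
        branches.pop();                   // overwrite #if/#elif with #elif or #else
        branches.push(kind);
        return true;
    }
}

For example, the call sequence changeBranch(IF), changeBranch(ELSE), changeBranch(ELIF) yields true, true, false, because an #elif after an #else is rejected.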
@ -1,75 +0,0 @@
|
||||||
/*******************************************************************************
|
|
||||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
|
||||||
* All rights reserved. This program and the accompanying materials
|
|
||||||
* are made available under the terms of the Eclipse Public License v1.0
|
|
||||||
* which accompanies this distribution, and is available at
|
|
||||||
* http://www.eclipse.org/legal/epl-v10.html
|
|
||||||
*
|
|
||||||
* Contributors:
|
|
||||||
* Markus Schorn - initial API and implementation
|
|
||||||
*******************************************************************************/
|
|
||||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
|
|
||||||
import org.eclipse.cdt.core.parser.IToken;
|
|
||||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Wraps a {@link Lexer} and provides additional information for the preprocessor.
|
|
||||||
* <p>
|
|
||||||
* Note that for parsing the preprocessor directives the lexer is used directly, so this class
|
|
||||||
* is not allowed to store any state about the lexing process.
|
|
||||||
*
|
|
||||||
* since 5.0
|
|
||||||
*/
|
|
||||||
public class ScannerContextFile extends ScannerContext {
|
|
||||||
|
|
||||||
private final Lexer fLexer;
|
|
||||||
private final ArrayList fBranches= new ArrayList();
|
|
||||||
|
|
||||||
public ScannerContextFile(ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
|
|
||||||
super(ctx, parent);
|
|
||||||
fLexer= lexer;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Token currentLexerToken() {
|
|
||||||
return fLexer.currentToken();
|
|
||||||
}
|
|
||||||
|
|
||||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
|
||||||
return fLexer.nextToken();
|
|
||||||
}
|
|
||||||
|
|
||||||
public Lexer getLexerForPPDirective() {
|
|
||||||
if (fLexer.currentTokenIsFirstOnLine() && fLexer.currentToken().getType() == IToken.tPOUND) {
|
|
||||||
return fLexer;
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean changeBranch(Integer branchKind) {
|
|
||||||
// an if starts a new conditional construct
|
|
||||||
if (branchKind == BRANCH_IF) {
|
|
||||||
fBranches.add(branchKind);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
// if we are not inside of an conditional there shouldn't be an #else, #elsif or #end
|
|
||||||
final int pos= fBranches.size()-1;
|
|
||||||
if (pos < 0) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
// an #end just pops one construct.
|
|
||||||
if (branchKind == BRANCH_END) {
|
|
||||||
fBranches.remove(pos);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
// #elsif or #else cannot appear after another #else
|
|
||||||
if (fBranches.get(pos) == BRANCH_ELSE) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
// overwrite #if, #elsif with #elsif or #else
|
|
||||||
fBranches.set(pos, branchKind);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,49 +0,0 @@
|
||||||
/*******************************************************************************
|
|
||||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
|
||||||
* All rights reserved. This program and the accompanying materials
|
|
||||||
* are made available under the terms of the Eclipse Public License v1.0
|
|
||||||
* which accompanies this distribution, and is available at
|
|
||||||
* http://www.eclipse.org/legal/epl-v10.html
|
|
||||||
*
|
|
||||||
* Contributors:
|
|
||||||
* Markus Schorn - initial API and implementation
|
|
||||||
*******************************************************************************/
|
|
||||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
public class ScannerContextMacroExpansion extends ScannerContext {
|
|
||||||
private static final Token END_TOKEN = new Token(Lexer.tEND_OF_INPUT, null, 0, 0);
|
|
||||||
|
|
||||||
private Token fTokens;
|
|
||||||
|
|
||||||
public ScannerContextMacroExpansion(ILocationCtx ctx, ScannerContext parent, TokenList tokens) {
|
|
||||||
super(ctx, parent);
|
|
||||||
fTokens= tokens.first();
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean changeBranch(Integer state) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Token currentLexerToken() {
|
|
||||||
Token t= fTokens;
|
|
||||||
if (t == null) {
|
|
||||||
return END_TOKEN;
|
|
||||||
}
|
|
||||||
return t;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Lexer getLexerForPPDirective() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Token nextPPToken() {
|
|
||||||
fTokens= (Token) fTokens.getNext();
|
|
||||||
return currentLexerToken();
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean expandsMacros() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,46 +0,0 @@
|
||||||
/*******************************************************************************
|
|
||||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
|
||||||
* All rights reserved. This program and the accompanying materials
|
|
||||||
* are made available under the terms of the Eclipse Public License v1.0
|
|
||||||
* which accompanies this distribution, and is available at
|
|
||||||
* http://www.eclipse.org/legal/epl-v10.html
|
|
||||||
*
|
|
||||||
* Contributors:
|
|
||||||
* Markus Schorn - initial API and implementation
|
|
||||||
*******************************************************************************/
|
|
||||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
|
||||||
|
|
||||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Context used to run the preprocessor while swallowing all tokens.
|
|
||||||
* Needed to process macro-files as specified by the -imacro compiler option of gcc.
|
|
||||||
* @since 5.0
|
|
||||||
*/
|
|
||||||
public class ScannerContextMacroFile extends ScannerContextFile {
|
|
||||||
private final CPreprocessor fCpp;
|
|
||||||
private boolean fSkippingTokens= false;
|
|
||||||
|
|
||||||
public ScannerContextMacroFile(CPreprocessor cpp, ILocationCtx ctx, ScannerContext parent, Lexer lexer) {
|
|
||||||
super(ctx, parent, lexer);
|
|
||||||
fCpp= cpp;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
|
||||||
if (fSkippingTokens) {
|
|
||||||
final Token t= super.nextPPToken();
|
|
||||||
if (t.getType() == Lexer.tEND_OF_INPUT) {
|
|
||||||
fSkippingTokens= false;
|
|
||||||
}
|
|
||||||
return t;
|
|
||||||
}
|
|
||||||
|
|
||||||
// use preprocessor to read tokens off this context, until this context is done.
|
|
||||||
fSkippingTokens= true;
|
|
||||||
Token t;
|
|
||||||
do {
|
|
||||||
t= fCpp.fetchTokenFromPreprocessor();
|
|
||||||
} while (fSkippingTokens);
|
|
||||||
return t;
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,116 +0,0 @@
|
||||||
/*******************************************************************************
|
|
||||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
|
||||||
* All rights reserved. This program and the accompanying materials
|
|
||||||
* are made available under the terms of the Eclipse Public License v1.0
|
|
||||||
* which accompanies this distribution, and is available at
|
|
||||||
* http://www.eclipse.org/legal/epl-v10.html
|
|
||||||
*
|
|
||||||
* Contributors:
|
|
||||||
* Markus Schorn - initial API and implementation
|
|
||||||
*******************************************************************************/
|
|
||||||
|
|
||||||
package org.eclipse.cdt.internal.core.parser.scanner;
|
|
||||||
|
|
||||||
import org.eclipse.cdt.core.parser.IToken;
|
|
||||||
import org.eclipse.cdt.core.parser.Keywords;
|
|
||||||
import org.eclipse.cdt.core.parser.OffsetLimitReachedException;
|
|
||||||
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Wraps a ScannerContext and modifies its behavior by limiting the tokens
|
|
||||||
* to the ones on the current line. Instead of the newline token an end-of-input
|
|
||||||
* token is returned. The newline token of the underlying context is not consumed.
|
|
||||||
* @since 5.0
|
|
||||||
*/
|
|
||||||
public final class ScannerContextPPDirective extends ScannerContext {
|
|
||||||
|
|
||||||
private static final int STATE_PREVENT_EXPANSION = 1;
|
|
||||||
private static final int STATE_DEFINED_LPAREN = 2;
|
|
||||||
private static final int STATE_DEFINED = 3;
|
|
||||||
private final Lexer fLexer;
|
|
||||||
private Token fToken;
|
|
||||||
private boolean fConvertDefinedToken;
|
|
||||||
private int fPreventMacroExpansion= 0;
|
|
||||||
private int fLastEndOffset;
|
|
||||||
|
|
||||||
public ScannerContextPPDirective(Lexer lexer, boolean convertDefinedToken) {
|
|
||||||
super(null, null);
|
|
||||||
fLexer= lexer;
|
|
||||||
fConvertDefinedToken= convertDefinedToken;
|
|
||||||
|
|
||||||
final Token currentToken = lexer.currentToken();
|
|
||||||
fLastEndOffset= currentToken.getOffset();
|
|
||||||
fToken= convertToken(currentToken);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Token currentLexerToken() {
|
|
||||||
return fToken;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Token nextPPToken() throws OffsetLimitReachedException {
|
|
||||||
if (fToken.getType() == Lexer.tEND_OF_INPUT) {
|
|
||||||
return fToken;
|
|
||||||
}
|
|
||||||
Token t1= fLexer.nextToken();
|
|
||||||
t1 = convertToken(t1);
|
|
||||||
fToken= t1;
|
|
||||||
|
|
||||||
Token t = t1;
|
|
||||||
return t;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Lexer getLexerForPPDirective() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean changeBranch(Integer state) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
private Token convertToken(Token t) {
|
|
||||||
switch (t.getType()) {
|
|
||||||
case Lexer.tNEWLINE:
|
|
||||||
t= new Token(Lexer.tEND_OF_INPUT, null, t.getEndOffset(), t.getEndOffset());
|
|
||||||
break;
|
|
||||||
case IToken.tIDENTIFIER:
|
|
||||||
if (fConvertDefinedToken && CharArrayUtils.equals(Keywords.cDEFINED, t.getCharImage())) {
|
|
||||||
t.setType(CPreprocessor.tDEFINED);
|
|
||||||
fPreventMacroExpansion= STATE_DEFINED;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
switch(fPreventMacroExpansion) {
|
|
||||||
case STATE_DEFINED:
|
|
||||||
case STATE_DEFINED_LPAREN:
|
|
||||||
fPreventMacroExpansion= STATE_PREVENT_EXPANSION;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
fPreventMacroExpansion= 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fLastEndOffset= t.getEndOffset();
|
|
||||||
break;
|
|
||||||
case IToken.tLPAREN:
|
|
||||||
if (fPreventMacroExpansion == STATE_DEFINED) {
|
|
||||||
fPreventMacroExpansion= STATE_DEFINED_LPAREN; // suppress macro-expansion for 'defined (id)'
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
fPreventMacroExpansion= 0;
|
|
||||||
}
|
|
||||||
fLastEndOffset= t.getEndOffset();
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
fPreventMacroExpansion= 0;
|
|
||||||
fLastEndOffset= t.getEndOffset();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return t;
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean expandsMacros() {
|
|
||||||
return fPreventMacroExpansion == 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
public int getLastEndOffset() {
|
|
||||||
return fLastEndOffset;
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -4293,4 +4293,6 @@ abstract class BaseScanner implements IScanner {

 protected abstract IToken newToken(int signal, char[] buffer);
+
+public void setComputeImageLocations(boolean val) {
+}
 }

@@ -104,19 +104,20 @@ public class FunctionStyleMacro extends ObjectStyleMacro {
 return sig;

 int len = name.length + 2 /*()*/;
-for( int i = 0; i < arglist.length && arglist[i] != null; i++ ){
-if( i + 1 < arglist.length && arglist[i+1] != null)
+final char[][] params = getOriginalParameters();
+for( int i = 0; i < params.length && params[i] != null; i++ ){
+if( i + 1 < params.length && params[i+1] != null)
 len += 1; /*,*/
-len += arglist[i].length;
+len += params[i].length;
 }
 sig = new char[len];
 System.arraycopy( name, 0, sig, 0, name.length );
 sig[name.length] = '(';
 int idx = name.length + 1;
-for( int i = 0; i < arglist.length && arglist[i] != null; i++ ){
-System.arraycopy( arglist[i], 0, sig, idx, arglist[i].length );
-idx += arglist[i].length;
-if( i + 1 < arglist.length && arglist[i+1] != null )
+for( int i = 0; i < params.length && params[i] != null; i++ ){
+System.arraycopy( params[i], 0, sig, idx, params[i].length );
+idx += params[i].length;
+if( i + 1 < params.length && params[i+1] != null )
 sig[idx++] = ',';
 }
 sig[idx] = ')';

@@ -771,6 +771,10 @@ public class LocationMap implements ILocationResolver, IScannerPreprocessorLog {
 return r_unclear;
 }

+public IASTFileLocation getExpansionLocation() {
+return null;
+}
+
 }

 public static interface _IPreprocessorDirective {
@@ -1022,6 +1026,10 @@ public class LocationMap implements ILocationResolver, IScannerPreprocessorLog {
 this.expansion = exp;
 }

+public IASTFileLocation getExpansionLocation() {
+return null;
+}
+
 }

 public abstract static class Location implements IASTNodeLocation {

@@ -18,6 +18,7 @@ import org.eclipse.cdt.core.dom.ast.ASTVisitor;
 import org.eclipse.cdt.core.dom.ast.DOMException;
 import org.eclipse.cdt.core.dom.ast.IASTCompletionContext;
 import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
+import org.eclipse.cdt.core.dom.ast.IASTImageLocation;
 import org.eclipse.cdt.core.dom.ast.IASTName;
 import org.eclipse.cdt.core.dom.ast.IASTNode;
 import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
@@ -149,6 +150,10 @@ public class PDOMASTAdapter {
 public char[] toCharArray() {
 return fDelegate.toCharArray();
 }
+
+public IASTImageLocation getImageLocation() {
+return null;
+}
 }

 private static class AnonymousEnumeration implements IEnumeration {

@@ -199,7 +199,7 @@ public abstract class PDOMIndexerTask extends PDOMWriter implements IPDOMIndexer
 private void internalParseTUs(IWritableIndex index, int readlockCount, Collection sources, Collection headers, IProgressMonitor monitor) throws CoreException, InterruptedException {
 TodoTaskUpdater taskUpdater = new TodoTaskUpdater();

-int options= AbstractLanguage.OPTION_ADD_COMMENTS;
+int options= AbstractLanguage.OPTION_ADD_COMMENTS | AbstractLanguage.OPTION_NO_IMAGE_LOCATIONS;
 if (checkProperty(IndexerPreferences.KEY_SKIP_ALL_REFERENCES)) {
 options |= AbstractLanguage.OPTION_SKIP_FUNCTION_BODIES;
 }

@@ -27,6 +27,7 @@ import org.eclipse.cdt.core.dom.parser.cpp.GPPParserExtensionConfiguration;
 import org.eclipse.cdt.core.dom.parser.cpp.GPPScannerExtensionConfiguration;
 import org.eclipse.cdt.core.dom.parser.cpp.ICPPParserExtensionConfiguration;
 import org.eclipse.cdt.core.parser.CodeReader;
+import org.eclipse.cdt.core.parser.IParserLogService;
 import org.eclipse.cdt.core.parser.IScanner;
 import org.eclipse.cdt.core.parser.IScannerInfo;
 import org.eclipse.cdt.core.parser.IScannerInfoProvider;
@@ -37,6 +38,7 @@ import org.eclipse.cdt.core.parser.ParserUtil;
 import org.eclipse.cdt.core.parser.ScannerInfo;
 import org.eclipse.cdt.internal.core.dom.parser.c.GNUCSourceParser;
 import org.eclipse.cdt.internal.core.dom.parser.cpp.GNUCPPSourceParser;
+import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
 import org.eclipse.cdt.internal.core.parser.scanner2.DOMScanner;
 import org.eclipse.core.resources.IFile;
 import org.eclipse.core.resources.IProject;
@@ -123,9 +125,8 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
 scannerExtensionConfiguration = CPP_GNU_SCANNER_EXTENSION;
 else
 scannerExtensionConfiguration = C_GNU_SCANNER_EXTENSION;
-scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE,
-l, ParserFactory.createDefaultLogService(), scannerExtensionConfiguration, fileCreator);
+scanner= createScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, l, ParserFactory.createDefaultLogService(),
+scannerExtensionConfiguration, fileCreator);
 scanner.setScanComments(parseComment);
 //assume GCC
 if( l == ParserLanguage.C )
@@ -137,12 +138,10 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
 {
 String dialect = configuration.getParserDialect();
 if( dialect.equals( dialects[0]) || dialect.equals( dialects[2]))
-scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE,
-ParserLanguage.C,
-ParserUtil.getScannerLogService(), C_GNU_SCANNER_EXTENSION, fileCreator);
+scanner= createScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.C,
+ParserUtil.getScannerLogService(), C_GNU_SCANNER_EXTENSION, fileCreator);
 else if( dialect.equals( dialects[1] ) || dialect.equals( dialects[3] ))
-scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE,
-ParserLanguage.CPP,
-ParserUtil.getScannerLogService(), CPP_GNU_SCANNER_EXTENSION, fileCreator);
+scanner = createScanner(reader, scanInfo, ParserMode.COMPLETE_PARSE, ParserLanguage.CPP,
+ParserUtil.getScannerLogService(), CPP_GNU_SCANNER_EXTENSION, fileCreator);
 else
 throw new UnsupportedDialectException();
@@ -208,9 +207,8 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
 else
 scannerExtensionConfiguration = C_GNU_SCANNER_EXTENSION;

-IScanner scanner = new DOMScanner(reader, scanInfo, ParserMode.COMPLETION_PARSE,
-l, ParserFactory.createDefaultLogService(),
-scannerExtensionConfiguration, fileCreator);
+IScanner scanner= createScanner(reader, scanInfo, ParserMode.COMPLETION_PARSE, l,
+ParserFactory.createDefaultLogService(), scannerExtensionConfiguration, fileCreator);
 scanner.setContentAssistMode(offset);

 // assume GCC
@@ -230,6 +228,15 @@ public class InternalASTServiceProvider implements IASTServiceProvider {
 return node;
 }

+private IScanner createScanner(CodeReader reader, IScannerInfo scanInfo,
+ParserMode mode, ParserLanguage lang, IParserLogService log,
+IScannerExtensionConfiguration scanConfig, ICodeReaderFactory fileCreator) {
+if (CPreprocessor.PROP_VALUE.equals(System.getProperty("scanner"))) { //$NON-NLS-1$
+return new CPreprocessor(reader, scanInfo, lang, log, scanConfig, fileCreator);
+}
+return new DOMScanner(reader, scanInfo, mode, lang, log, scanConfig, fileCreator);
+}
+
 /*
 * (non-Javadoc)
 *

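Note on createScanner() above: the new preprocessor is opted into via the "scanner" system property. The sketch below only illustrates that selection pattern; the real property value is whatever CPreprocessor.PROP_VALUE holds, and the "newScanner" constant here is a made-up placeholder, not the actual CDT constant.

// Illustrative sketch only: system-property driven scanner selection.
class ScannerSelectionSketch {
    static final String PROP_VALUE = "newScanner"; // hypothetical stand-in for CPreprocessor.PROP_VALUE

    static String pickScanner() {
        // Mirrors the check in createScanner(): fall back to DOMScanner unless the property matches.
        if (PROP_VALUE.equals(System.getProperty("scanner"))) {
            return "CPreprocessor";
        }
        return "DOMScanner";
    }

    public static void main(String[] args) {
        // Launch the JVM with -Dscanner=<PROP_VALUE> to exercise the new preprocessor path.
        System.out.println(pickScanner());
    }
}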
@@ -119,7 +119,8 @@ public class CallHierarchyBaseTest extends BaseUITestCase {
 for (int i=0; i<200; i++) {
 item= root.getItem(i1);
 try {
-if (!"...".equals(item.getText())) {
+String text= item.getText();
+if (!"...".equals(text) && !"".equals(text)) {
 break;
 }
 } catch (SWTException e) {

@@ -33,11 +33,32 @@ public class CompletionTest_MacroRef_NoPrefix extends CompletionProposalsBaseTe
 			"__DATE__",
 			"__FILE__",
 			"__LINE__",
-			"__TIME__"
+			"__STDC__",
+			"__TIME__",
+			"__asm__",
+			"__builtin_constant_p(exp)",
+			"__builtin_va_arg(ap, type)",
+			"__complex__",
+			"__const",
+			"__const__",
+			"__cplusplus",
+			"__extension__",
+			"__imag__",
+			"__inline__",
+			"__null",
+			"__real__",
+			"__restrict",
+			"__restrict__",
+			"__signed__",
+			"__stdcall",
+			"__volatile__"
 	};
 
 	public CompletionTest_MacroRef_NoPrefix(String name) {
 		super(name);
+		setExpectFailure(0); // no bugnumber, the test fails because I added additional macros that
+		// are reported by the CPreprocessor, but not by Scanner2. As soon as we switch over to the
+		// CPreprocessor, the test-case works again.
 	}
 
 	public static Test suite() {
@@ -93,11 +93,21 @@ public class CElementHyperlinkDetector implements IHyperlinkDetector {
 			IASTName[] selectedNames=
 				lang.getSelectedNames(ast, selection.getOffset(), selection.getLength());
 
-			IRegion linkRegion;
+			IRegion linkRegion= null;
 			if(selectedNames.length > 0 && selectedNames[0] != null) { // found a name
+				// prefer include statement over the include name
+				if (selectedNames[0].getParent() instanceof IASTPreprocessorIncludeStatement) {
+					IASTFileLocation loc= selectedNames[0].getParent().getFileLocation();
+					if (loc != null) {
+						linkRegion= new Region(loc.getNodeOffset(), loc.getNodeLength());
+					}
+				}
+				if (linkRegion == null) {
 					linkRegion = new Region(selection.getOffset(), selection.getLength());
 				}
+			}
 			else { // check if we are in an include statement
+				// mstodo- support for old scanner
 				linkRegion = matchIncludeStatement(ast, selection);
 			}
 
@@ -29,7 +29,7 @@ import org.eclipse.jface.text.ITextInputListener;
 import org.eclipse.jface.text.TypedPosition;
 import org.eclipse.swt.widgets.Display;
 
-import org.eclipse.cdt.core.dom.ast.IASTNodeLocation;
+import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorElifStatement;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorElseStatement;
 import org.eclipse.cdt.core.dom.ast.IASTPreprocessorEndifStatement;
@@ -232,21 +232,17 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
 
 		for (int i = 0; i < preprocStmts.length; i++) {
 			IASTPreprocessorStatement statement = preprocStmts[i];
-			if (!fileName.equals(statement.getContainingFilename())) {
+			IASTFileLocation floc= statement.getFileLocation();
+			if (floc == null || !fileName.equals(floc.getFileName())) {
 				// preprocessor directive is from a different file
 				continue;
 			}
-			IASTNodeLocation[] nodeLocations = statement.getNodeLocations();
-			if (nodeLocations.length != 1) {
-				continue;
-			}
-			IASTNodeLocation stmtLocation= nodeLocations[0];
 			if (statement instanceof IASTPreprocessorIfStatement) {
 				IASTPreprocessorIfStatement ifStmt = (IASTPreprocessorIfStatement)statement;
 				inactiveCodeStack.push(Boolean.valueOf(inInactiveCode));
 				if (!ifStmt.taken()) {
 					if (!inInactiveCode) {
-						inactiveCodeStart = stmtLocation.getNodeOffset();
+						inactiveCodeStart = floc.getNodeOffset();
 						inInactiveCode = true;
 					}
 				}
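Note (not part of the commit): the highlighting code now relies on IASTFileLocation instead of the IASTNodeLocation[] array, presumably because directives reported by the CPreprocessor need not collapse into exactly one node location. A minimal sketch of the resulting pattern, using only the variable names that appear in the hunks above and below:

    // Inside the loop over preprocStmts; 'statement' and 'fileName' are as in the hunk above.
    IASTFileLocation floc = statement.getFileLocation();
    if (floc == null || !fileName.equals(floc.getFileName())) {
        continue; // no usable location, or the directive comes from another file
    }
    int start = floc.getNodeOffset();                        // used for inactiveCodeStart
    int end = floc.getNodeOffset() + floc.getNodeLength();   // used when the region is closed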
@@ -255,7 +251,7 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
 				inactiveCodeStack.push(Boolean.valueOf(inInactiveCode));
 				if (!ifdefStmt.taken()) {
 					if (!inInactiveCode) {
-						inactiveCodeStart = stmtLocation.getNodeOffset();
+						inactiveCodeStart = floc.getNodeOffset();
 						inInactiveCode = true;
 					}
 				}
@@ -264,27 +260,27 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
 				inactiveCodeStack.push(Boolean.valueOf(inInactiveCode));
 				if (!ifndefStmt.taken()) {
 					if (!inInactiveCode) {
-						inactiveCodeStart = stmtLocation.getNodeOffset();
+						inactiveCodeStart = floc.getNodeOffset();
 						inInactiveCode = true;
 					}
 				}
 			} else if (statement instanceof IASTPreprocessorElseStatement) {
 				IASTPreprocessorElseStatement elseStmt = (IASTPreprocessorElseStatement)statement;
 				if (!elseStmt.taken() && !inInactiveCode) {
-					inactiveCodeStart = stmtLocation.getNodeOffset();
+					inactiveCodeStart = floc.getNodeOffset();
 					inInactiveCode = true;
 				} else if (elseStmt.taken() && inInactiveCode) {
-					int inactiveCodeEnd = stmtLocation.getNodeOffset();
+					int inactiveCodeEnd = floc.getNodeOffset();
 					positions.add(createHighlightPosition(inactiveCodeStart, inactiveCodeEnd, false, fHighlightKey));
 					inInactiveCode = false;
 				}
 			} else if (statement instanceof IASTPreprocessorElifStatement) {
 				IASTPreprocessorElifStatement elifStmt = (IASTPreprocessorElifStatement)statement;
 				if (!elifStmt.taken() && !inInactiveCode) {
-					inactiveCodeStart = stmtLocation.getNodeOffset();
+					inactiveCodeStart = floc.getNodeOffset();
 					inInactiveCode = true;
 				} else if (elifStmt.taken() && inInactiveCode) {
-					int inactiveCodeEnd = stmtLocation.getNodeOffset();
+					int inactiveCodeEnd = floc.getNodeOffset();
 					positions.add(createHighlightPosition(inactiveCodeStart, inactiveCodeEnd, false, fHighlightKey));
 					inInactiveCode = false;
 				}
@@ -292,7 +288,7 @@ public class InactiveCodeHighlighting implements ICReconcilingListener, ITextInp
 			try {
 				boolean wasInInactiveCode = ((Boolean)inactiveCodeStack.pop()).booleanValue();
 				if (inInactiveCode && !wasInInactiveCode) {
-					int inactiveCodeEnd = stmtLocation.getNodeOffset() + stmtLocation.getNodeLength();
+					int inactiveCodeEnd = floc.getNodeOffset() + floc.getNodeLength();
 					positions.add(createHighlightPosition(inactiveCodeStart, inactiveCodeEnd, true, fHighlightKey));
 				}
 				inInactiveCode = wasInInactiveCode;