Mirror of https://github.com/eclipse-cdt/cdt, synced 2025-04-29 19:45:01 +02:00

Commit 6ba6f4d584 (parent 41731f51ac): Additional testcases for macro-expansion plus fixes.

17 changed files with 831 additions and 301 deletions
@@ -16,7 +16,8 @@ bin.includes = plugin.xml,\
META-INF/,\
parser/org/eclipse/cdt/internal/index/tests/,\
parser/org/eclipse/cdt/internal/pdom/tests/,\
parser/org/eclipse/cdt/core/parser/tests/ast2/
parser/org/eclipse/cdt/core/parser/tests/ast2/,\
parser/org/eclipse/cdt/core/parser/tests/scanner/

output.cdtcoretests.jar = bin/
source.cdtcoretests.jar = failures/,\
@@ -44,7 +44,7 @@ public class LexerTests extends BaseTestCase {

private void init(String input) throws Exception {
fLog.clear();
fLexer= new Lexer(input.toCharArray(), new LexerOptions(), fLog);
fLexer= new Lexer(input.toCharArray(), new LexerOptions(), fLog, null);
fLog.setInput(input);
fLexer.nextToken();
fLastEndOffset= 0;

@@ -55,7 +55,7 @@ public class LexerTests extends BaseTestCase {
final LexerOptions lexerOptions = new LexerOptions();
lexerOptions.fSupportDollarInitializers= dollar;
lexerOptions.fSupportMinAndMax= minmax;
fLexer= new Lexer(input.toCharArray(), lexerOptions, fLog);
fLexer= new Lexer(input.toCharArray(), lexerOptions, fLog, null);
fLexer.nextToken();
fLastEndOffset= 0;
}
@ -11,177 +11,40 @@
|
|||
*******************************************************************************/
|
||||
package org.eclipse.cdt.core.parser.tests.scanner;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.io.Writer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import junit.framework.ComparisonFailure;
|
||||
import junit.framework.TestSuite;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ICodeReaderFactory;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIncludeStatement;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTProblem;
|
||||
import org.eclipse.cdt.core.dom.ast.IMacroBinding;
|
||||
import org.eclipse.cdt.core.dom.parser.IScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.dom.parser.c.GCCScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.dom.parser.cpp.GPPScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.parser.CodeReader;
|
||||
import org.eclipse.cdt.core.parser.EndOfFileException;
|
||||
import org.eclipse.cdt.core.parser.IParserLogService;
|
||||
import org.eclipse.cdt.core.parser.IProblem;
|
||||
import org.eclipse.cdt.core.parser.IScannerInfo;
|
||||
import org.eclipse.cdt.core.parser.ISourceElementRequestor;
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.NullLogService;
|
||||
import org.eclipse.cdt.core.parser.NullSourceElementRequestor;
|
||||
import org.eclipse.cdt.core.parser.ParserLanguage;
|
||||
import org.eclipse.cdt.core.parser.ParserMode;
|
||||
import org.eclipse.cdt.core.parser.ScannerInfo;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
|
||||
import org.eclipse.cdt.core.testplugin.util.BaseTestCase;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.ILocationResolver;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner2.FileCodeReaderFactory;
|
||||
|
||||
/**
|
||||
* Scanner2Tests ported to use the CPreprocessor
|
||||
*/
|
||||
public class PortedScannerTest extends BaseTestCase {
|
||||
private static final IParserLogService NULL_LOG = new NullLogService();
|
||||
|
||||
public static TestSuite suite() {
|
||||
return suite(PortedScannerTest.class);
|
||||
public class PortedScannerTests extends PreprocessorTestsBase {
|
||||
public static TestSuite suite() {
|
||||
return suite(PortedScannerTests.class);
|
||||
}
|
||||
|
||||
private CPreprocessor fScanner;
|
||||
private ILocationResolver fLocationResolver;
|
||||
|
||||
public PortedScannerTest() {
|
||||
public PortedScannerTests() {
|
||||
super();
|
||||
}
|
||||
|
||||
public PortedScannerTest(String name) {
|
||||
public PortedScannerTests(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input) throws IOException {
|
||||
initializeScanner(input, ParserMode.COMPLETE_PARSE);
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input, ParserMode mode) throws IOException {
|
||||
initializeScanner(input, ParserLanguage.CPP, mode);
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input, ParserLanguage lang) throws IOException {
|
||||
initializeScanner(input, lang, ParserMode.COMPLETE_PARSE);
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input, ParserLanguage lang, ParserMode mode) throws IOException {
|
||||
ICodeReaderFactory readerFactory= FileCodeReaderFactory.getInstance();
|
||||
CodeReader reader= new CodeReader(input.toCharArray());
|
||||
IScannerExtensionConfiguration scannerConfig;
|
||||
IScannerInfo scannerInfo= new ScannerInfo();
|
||||
|
||||
if (lang == ParserLanguage.C) {
|
||||
scannerConfig= new GCCScannerExtensionConfiguration();
|
||||
}
|
||||
else {
|
||||
scannerConfig= new GPPScannerExtensionConfiguration();
|
||||
}
|
||||
|
||||
fScanner= new CPreprocessor(reader, scannerInfo, lang, NULL_LOG, scannerConfig, readerFactory);
|
||||
fLocationResolver= (ILocationResolver) fScanner.getAdapter(ILocationResolver.class);
|
||||
}
|
||||
|
||||
|
||||
protected int fullyTokenize() throws Exception {
|
||||
try {
|
||||
for(;;) {
|
||||
IToken t= fScanner.nextToken();
|
||||
assertTrue(t.getType() <= IToken.tLAST);
|
||||
}
|
||||
}
|
||||
catch ( EndOfFileException e){
|
||||
}
|
||||
return fScanner.getCount();
|
||||
}
|
||||
|
||||
|
||||
protected void validateToken(int tokenType) throws Exception {
|
||||
IToken t= fScanner.nextToken();
|
||||
assertEquals(tokenType, t.getType());
|
||||
}
|
||||
|
||||
protected void validateToken(int tokenType, String image) throws Exception {
|
||||
IToken t= fScanner.nextToken();
|
||||
assertEquals(tokenType, t.getType());
|
||||
assertEquals(image, t.getImage());
|
||||
}
|
||||
|
||||
protected void validateInteger(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tINTEGER, expectedImage);
|
||||
}
|
||||
|
||||
protected void validateIdentifier(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tIDENTIFIER, expectedImage);
|
||||
}
|
||||
|
||||
protected void validateString(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tSTRING, "\"" + expectedImage + "\"");
|
||||
}
|
||||
|
||||
protected void validateChar(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tCHAR, "'" + expectedImage + "'");
|
||||
}
|
||||
|
||||
protected void validateWideChar(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tLCHAR, "L'" + expectedImage + "'");
|
||||
}
|
||||
|
||||
protected void validateLString(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tLSTRING, "L\"" + expectedImage + "\"");
|
||||
}
|
||||
|
||||
protected void validateFloatingPointLiteral(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tFLOATINGPT, expectedImage);
|
||||
}
|
||||
|
||||
protected void validateEOF() throws Exception {
|
||||
try {
|
||||
IToken t= fScanner.nextToken();
|
||||
fail("superfluous token " + t);
|
||||
}
|
||||
catch(EndOfFileException e) {
|
||||
}
|
||||
}
|
||||
|
||||
private void assertCharArrayEquals(char[] expected, char[] actual) {
|
||||
if (!CharArrayUtils.equals(expected, actual))
|
||||
throw new ComparisonFailure(null, new String(expected), new String(actual));
|
||||
}
|
||||
|
||||
protected void validateDefinition(String name, String value) {
|
||||
Object expObject = fScanner.getRealDefinitions().get(name.toCharArray());
|
||||
assertNotNull(expObject);
|
||||
assertTrue(expObject instanceof IMacroBinding);
|
||||
assertCharArrayEquals(value.toCharArray(), ((IMacroBinding)expObject).getExpansion());
|
||||
}
|
||||
|
||||
protected void validateDefinition(String name, int value) {
|
||||
validateDefinition(name, String.valueOf(value));
|
||||
}
|
||||
|
||||
protected void validateAsUndefined(String name) {
|
||||
assertNull(fScanner.getDefinitions().get(name.toCharArray()));
|
||||
}
|
||||
|
||||
protected void validateProblemCount(int count) throws Exception {
|
||||
assertEquals(count, fLocationResolver.getScannerProblems().length);
|
||||
}
|
||||
|
||||
public void testBug102825_1() throws Exception {
|
||||
StringBuffer buffer = new StringBuffer(
|
||||
"#define CURLOPTTYPE_OBJECTPOINT 10000\n");
|
|
@ -0,0 +1,401 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2004, 2007 IBM Corporation and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* IBM - Initial API and implementation
|
||||
* Markus Schorn (Wind River Systems)
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.core.parser.tests.scanner;
|
||||
|
||||
import junit.framework.TestSuite;
|
||||
|
||||
import org.eclipse.cdt.core.parser.IProblem;
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
|
||||
|
||||
/**
|
||||
* Scanner2Tests ported to use the CPreprocessor
|
||||
*/
|
||||
public class PreprocessorTests extends PreprocessorTestsBase {
|
||||
|
||||
public static TestSuite suite() {
|
||||
return suite(PreprocessorTests.class);
|
||||
}
|
||||
|
||||
// #define f(x) x+x
|
||||
// #define obj_f f
|
||||
// #define obj_fx f x
|
||||
// #define obj_fopen f (
|
||||
// obj_f
|
||||
// (y)
|
||||
// obj_f
|
||||
// y
|
||||
// obj_fx
|
||||
// (y)
|
||||
// obj_fopen y)
|
||||
public void testParenthesisOnNextLine() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("y");
|
||||
validateToken(IToken.tPLUS);
|
||||
validateIdentifier("y");
|
||||
|
||||
validateIdentifier("f");
|
||||
validateIdentifier("y");
|
||||
|
||||
validateIdentifier("f");
|
||||
validateIdentifier("x");
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateIdentifier("y");
|
||||
validateToken(IToken.tRPAREN);
|
||||
|
||||
validateIdentifier("y");
|
||||
validateToken(IToken.tPLUS);
|
||||
validateIdentifier("y");
|
||||
validateEOF();
|
||||
}
|
||||
|
||||
// #define f(x) x
|
||||
// f(f(x));
|
||||
// f(f);
|
||||
// f(f)(x);
|
||||
public void testRecursiveInArgument() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("x");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("f");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("f");
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateIdentifier("x");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
}
|
||||
|
||||
|
||||
// #define f(x) x
|
||||
// f(f(
|
||||
public void testMissingParenthesis() throws Exception {
|
||||
initializeScanner();
|
||||
validateEOF();
|
||||
}
|
||||
|
||||
// #define b(x) ok
|
||||
// #define step1 b
|
||||
// #define step2 step1 (x)
|
||||
// step2
|
||||
public void testSpaceBeforeParenthesis() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("ok");
|
||||
validateEOF();
|
||||
}
|
||||
|
||||
// #define m1(x) a1
|
||||
// #define m2(x...) a2
|
||||
// m1(1,2);
|
||||
// m2(1,2);
|
||||
public void testSuperfluousComma() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("a1");
|
||||
validateInteger("2");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("a2");
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(1);
|
||||
validateProblem(0, IProblem.PREPROCESSOR_MACRO_USAGE_ERROR, "m1");
|
||||
}
|
||||
|
||||
// #define str(x,y) #x#y
|
||||
// str(a,b );
|
||||
// str( a,b);
|
||||
// str(a a,b);
|
||||
// str(a, b);
|
||||
public void testSpaceInArgs() throws Exception {
|
||||
initializeScanner();
|
||||
validateString("ab");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateString("ab");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateString("a ab");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateString("ab");
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define str(x) #x
|
||||
// #define m0( ) a0
|
||||
// #define m1(x) str( .x. )
|
||||
// #define m2(x,y) str( .x.y. )
|
||||
// #define open0 m0(
|
||||
// #define open1 m1(
|
||||
// #define open2 m2(
|
||||
// open0 );
|
||||
// open1 a );
|
||||
// open2 a , b c );
|
||||
public void testSpaceInArgsViaOpenMacro() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("a0");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateString(".a.");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateString(".a.b c.");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define str(x) #x
|
||||
// #define m0( ) a0
|
||||
// #define m1(x) str(.x.)
|
||||
// #define m2(x,y) str(.x.y.)
|
||||
// #define _a a
|
||||
// #define _b b
|
||||
// #define _c c
|
||||
// #define use0 m0( )
|
||||
// #define use1 m1( _a )
|
||||
// #define use2 m2( _a , _b _c )
|
||||
// use0;
|
||||
// use1;
|
||||
// use2;
|
||||
public void testSpaceInArgsViaExpansion() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("a0");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateString(".a.");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateString(".a.b c.");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define m0() a
|
||||
// m0;
|
||||
// m0();
|
||||
// m0( );
|
||||
// m0(x);
|
||||
public void testFunctionStyleWithoutArgs() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("m0");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("a");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("a");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("a");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateEOF();
|
||||
validateProblemCount(1);
|
||||
validateProblem(0, IProblem.PREPROCESSOR_MACRO_USAGE_ERROR, "m0");
|
||||
}
|
||||
|
||||
// #define tp(x,y) #x##y
|
||||
// tp(a, );
|
||||
// tp(a,b);
|
||||
public void testStringifyAndPaste() throws Exception {
|
||||
initializeScanner();
|
||||
validateString("a");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(1);
|
||||
validateProblem(0, IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, "tp");
|
||||
}
|
||||
|
||||
// #define tp(x,y) x##y
|
||||
// tp(a, b c);
|
||||
// tp(a b,c);
|
||||
public void testPasteMultipleTokens() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("ab");
|
||||
validateIdentifier("c");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("a");
|
||||
validateIdentifier("bc");
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define obj a b ## c ## d e
|
||||
// obj;
|
||||
public void testObjectStyleTokenPaste() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("a");
|
||||
validateIdentifier("bcd");
|
||||
validateIdentifier("e");
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define variadic(x...) (a, ##x)
|
||||
// variadic();
|
||||
// variadic(b);
|
||||
// variadic(c,d);
|
||||
public void testGccVariadicMacroExtensions() throws Exception {
|
||||
initializeScanner();
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateIdentifier("a");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateIdentifier("a");
|
||||
validateToken(IToken.tCOMMA);
|
||||
validateIdentifier("b");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateIdentifier("a");
|
||||
validateToken(IToken.tCOMMA);
|
||||
validateIdentifier("c");
|
||||
validateToken(IToken.tCOMMA);
|
||||
validateIdentifier("d");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define str(x) #x
|
||||
// str();
|
||||
public void testEmptyStringify() throws Exception {
|
||||
initializeScanner();
|
||||
validateString("");
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define tp(x,y) x##y
|
||||
// #define _p p
|
||||
// tp(_p,);
|
||||
// tp(_p, a);
|
||||
public void testRescanAfterTokenPaste() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("p");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("_pa");
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define vararg(a, ...) (__VA_ARGS__)
|
||||
// vararg();
|
||||
// vararg( );
|
||||
// vararg(a);
|
||||
// vararg(a,b);
|
||||
// vararg(a, ,c);
|
||||
public void testVaargs() throws Exception {
|
||||
initializeScanner();
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateIdentifier("b");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateToken(IToken.tLPAREN);
|
||||
validateToken(IToken.tCOMMA);
|
||||
validateIdentifier("c");
|
||||
validateToken(IToken.tRPAREN);
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
// #define OBJ __VA_ARGS__
|
||||
// #define func(x) __VA_ARGS__
|
||||
// OBJ;
|
||||
// func(a);
|
||||
public void testVaargsWarning() throws Exception {
|
||||
initializeScanner();
|
||||
validateIdentifier("__VA_ARGS__");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateIdentifier("__VA_ARGS__");
|
||||
validateToken(IToken.tSEMI);
|
||||
validateEOF();
|
||||
// gcc actually warns about using __VA_ARGS__ in object-style macros too.
|
||||
validateProblemCount(1);
|
||||
validateProblem(0, IProblem.PREPROCESSOR_INVALID_VA_ARGS, null);
|
||||
}
|
||||
|
||||
// #define str(x) #x
|
||||
// #define _p p
|
||||
// #define obj str(_p) // str is expanded before _p is rescanned.
|
||||
// obj;
|
||||
public void testRescanOrder() throws Exception {
|
||||
initializeScanner();
|
||||
validateString("_p");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define obj #str
|
||||
// obj;
|
||||
public void testStringifyOperatorInObject() throws Exception {
|
||||
initializeScanner();
|
||||
validateToken(IToken.tPOUND);
|
||||
validateIdentifier("str");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
|
||||
// #define str(x) #x
|
||||
// #define open_str() str(a
|
||||
// open_str()b);
|
||||
public void testOpenStringify() throws Exception {
|
||||
initializeScanner();
|
||||
validateString("ab");
|
||||
validateToken(IToken.tSEMI);
|
||||
|
||||
validateEOF();
|
||||
validateProblemCount(0);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,181 @@
|
|||
/*******************************************************************************
|
||||
* Copyright (c) 2007 Wind River Systems, Inc. and others.
|
||||
* All rights reserved. This program and the accompanying materials
|
||||
* are made available under the terms of the Eclipse Public License v1.0
|
||||
* which accompanies this distribution, and is available at
|
||||
* http://www.eclipse.org/legal/epl-v10.html
|
||||
*
|
||||
* Contributors:
|
||||
* Markus Schorn - initial API and implementation
|
||||
*******************************************************************************/
|
||||
package org.eclipse.cdt.core.parser.tests.scanner;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import junit.framework.ComparisonFailure;
|
||||
|
||||
import org.eclipse.cdt.core.dom.ICodeReaderFactory;
|
||||
import org.eclipse.cdt.core.dom.ast.IASTProblem;
|
||||
import org.eclipse.cdt.core.dom.ast.IMacroBinding;
|
||||
import org.eclipse.cdt.core.dom.parser.IScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.dom.parser.c.GCCScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.dom.parser.cpp.GPPScannerExtensionConfiguration;
|
||||
import org.eclipse.cdt.core.parser.CodeReader;
|
||||
import org.eclipse.cdt.core.parser.EndOfFileException;
|
||||
import org.eclipse.cdt.core.parser.IParserLogService;
|
||||
import org.eclipse.cdt.core.parser.IScannerInfo;
|
||||
import org.eclipse.cdt.core.parser.IToken;
|
||||
import org.eclipse.cdt.core.parser.NullLogService;
|
||||
import org.eclipse.cdt.core.parser.ParserLanguage;
|
||||
import org.eclipse.cdt.core.parser.ParserMode;
|
||||
import org.eclipse.cdt.core.parser.ScannerInfo;
|
||||
import org.eclipse.cdt.core.parser.util.CharArrayUtils;
|
||||
import org.eclipse.cdt.core.testplugin.CTestPlugin;
|
||||
import org.eclipse.cdt.core.testplugin.util.BaseTestCase;
|
||||
import org.eclipse.cdt.core.testplugin.util.TestSourceReader;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.CPreprocessor;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner.ILocationResolver;
|
||||
import org.eclipse.cdt.internal.core.parser.scanner2.FileCodeReaderFactory;
|
||||
|
||||
public abstract class PreprocessorTestsBase extends BaseTestCase {
|
||||
|
||||
private static final IParserLogService NULL_LOG = new NullLogService();
|
||||
protected CPreprocessor fScanner;
|
||||
protected ILocationResolver fLocationResolver;
|
||||
|
||||
public PreprocessorTestsBase(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
||||
public PreprocessorTestsBase() {
|
||||
super();
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input) throws IOException {
|
||||
initializeScanner(input, ParserMode.COMPLETE_PARSE);
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input, ParserMode mode) throws IOException {
|
||||
initializeScanner(input, ParserLanguage.CPP, mode);
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input, ParserLanguage lang) throws IOException {
|
||||
initializeScanner(input, lang, ParserMode.COMPLETE_PARSE);
|
||||
}
|
||||
|
||||
protected void initializeScanner(String input, ParserLanguage lang, ParserMode mode) throws IOException {
|
||||
ICodeReaderFactory readerFactory= FileCodeReaderFactory.getInstance();
|
||||
CodeReader reader= new CodeReader(input.toCharArray());
|
||||
IScannerExtensionConfiguration scannerConfig;
|
||||
IScannerInfo scannerInfo= new ScannerInfo();
|
||||
|
||||
if (lang == ParserLanguage.C) {
|
||||
scannerConfig= new GCCScannerExtensionConfiguration();
|
||||
}
|
||||
else {
|
||||
scannerConfig= new GPPScannerExtensionConfiguration();
|
||||
}
|
||||
|
||||
fScanner= new CPreprocessor(reader, scannerInfo, lang, NULL_LOG, scannerConfig, readerFactory);
|
||||
fLocationResolver= (ILocationResolver) fScanner.getAdapter(ILocationResolver.class);
|
||||
}
|
||||
|
||||
protected void initializeScanner() throws Exception {
|
||||
StringBuffer[] input= TestSourceReader.getContentsForTest(
|
||||
CTestPlugin.getDefault().getBundle(), "parser", getClass(), getName(), 1);
|
||||
initializeScanner(input[0].toString());
|
||||
}
|
||||
|
||||
protected int fullyTokenize() throws Exception {
|
||||
try {
|
||||
for(;;) {
|
||||
IToken t= fScanner.nextToken();
|
||||
assertTrue(t.getType() <= IToken.tLAST);
|
||||
}
|
||||
}
|
||||
catch ( EndOfFileException e){
|
||||
}
|
||||
return fScanner.getCount();
|
||||
}
|
||||
|
||||
protected void validateToken(int tokenType) throws Exception {
|
||||
IToken t= fScanner.nextToken();
|
||||
assertEquals(tokenType, t.getType());
|
||||
}
|
||||
|
||||
protected void validateToken(int tokenType, String image) throws Exception {
|
||||
IToken t= fScanner.nextToken();
|
||||
assertEquals(tokenType, t.getType());
|
||||
assertEquals(image, t.getImage());
|
||||
}
|
||||
|
||||
protected void validateInteger(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tINTEGER, expectedImage);
|
||||
}
|
||||
|
||||
protected void validateIdentifier(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tIDENTIFIER, expectedImage);
|
||||
}
|
||||
|
||||
protected void validateString(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tSTRING, "\"" + expectedImage + "\"");
|
||||
}
|
||||
|
||||
protected void validateChar(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tCHAR, "'" + expectedImage + "'");
|
||||
}
|
||||
|
||||
protected void validateWideChar(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tLCHAR, "L'" + expectedImage + "'");
|
||||
}
|
||||
|
||||
protected void validateLString(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tLSTRING, "L\"" + expectedImage + "\"");
|
||||
}
|
||||
|
||||
protected void validateFloatingPointLiteral(String expectedImage) throws Exception {
|
||||
validateToken(IToken.tFLOATINGPT, expectedImage);
|
||||
}
|
||||
|
||||
protected void validateEOF() throws Exception {
|
||||
try {
|
||||
IToken t= fScanner.nextToken();
|
||||
fail("superfluous token " + t);
|
||||
}
|
||||
catch(EndOfFileException e) {
|
||||
}
|
||||
}
|
||||
|
||||
private void assertCharArrayEquals(char[] expected, char[] actual) {
|
||||
if (!CharArrayUtils.equals(expected, actual))
|
||||
throw new ComparisonFailure(null, new String(expected), new String(actual));
|
||||
}
|
||||
|
||||
protected void validateDefinition(String name, String value) {
|
||||
Object expObject = fScanner.getDefinitions().get(name);
|
||||
assertNotNull(expObject);
|
||||
assertTrue(expObject instanceof IMacroBinding);
|
||||
assertCharArrayEquals(value.toCharArray(), ((IMacroBinding)expObject).getExpansion());
|
||||
}
|
||||
|
||||
protected void validateDefinition(String name, int value) {
|
||||
validateDefinition(name, String.valueOf(value));
|
||||
}
|
||||
|
||||
protected void validateAsUndefined(String name) {
|
||||
assertNull(fScanner.getDefinitions().get(name.toCharArray()));
|
||||
}
|
||||
|
||||
protected void validateProblemCount(int count) throws Exception {
|
||||
assertEquals(count, fLocationResolver.getScannerProblems().length);
|
||||
}
|
||||
|
||||
protected void validateProblem(int idx, int problemID, String detail) throws Exception {
|
||||
IASTProblem problem= fLocationResolver.getScannerProblems()[idx];
|
||||
assertEquals(problemID, problem.getID());
|
||||
if (detail != null) {
|
||||
assertEquals(detail, problem.getArguments());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -19,7 +19,8 @@ public class ScannerTestSuite extends TestSuite {
TestSuite suite= new ScannerTestSuite();
suite.addTest(LexerTests.suite());
suite.addTest(LocationMapTests.suite());
suite.addTest(PortedScannerTest.suite());
suite.addTest(PortedScannerTests.suite());
suite.addTest(PreprocessorTests.suite());
return suite;
}
}
@@ -55,7 +55,9 @@ public class CPreprocessor implements ILexerLog, IScanner {
public static final int tDEFINED= IToken.FIRST_RESERVED_PREPROCESSOR;
public static final int tEXPANDED_IDENTIFIER= IToken.FIRST_RESERVED_PREPROCESSOR+1;
public static final int tSCOPE_MARKER= IToken.FIRST_RESERVED_PREPROCESSOR+2;
public static final int tMACRO_PARAMETER= IToken.FIRST_RESERVED_PREPROCESSOR+3;
public static final int tSPACE= IToken.FIRST_RESERVED_PREPROCESSOR+3;
public static final int tMACRO_PARAMETER= IToken.FIRST_RESERVED_PREPROCESSOR+4;
public static final int tEMPTY_TOKEN = IToken.FIRST_RESERVED_PREPROCESSOR+5;
@@ -105,7 +107,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
StringBuffer buffer = new StringBuffer("\""); //$NON-NLS-1$
buffer.append(getCurrentFilename());
buffer.append('\"');
return new ImageToken(IToken.tSTRING, 0, 0, buffer.toString().toCharArray());
return new ImageToken(IToken.tSTRING, null, 0, 0, buffer.toString().toCharArray());
}
};
final private DynamicStyleMacro __DATE__= new DynamicStyleMacro("__DATE__".toCharArray()) { //$NON-NLS-1$
@@ -124,7 +126,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
buffer.append(" "); //$NON-NLS-1$
buffer.append(cal.get(Calendar.YEAR));
buffer.append("\""); //$NON-NLS-1$
return new ImageToken(IToken.tSTRING, 0, 0, buffer.toString().toCharArray());
return new ImageToken(IToken.tSTRING, null, 0, 0, buffer.toString().toCharArray());
}
};
@@ -144,14 +146,14 @@ public class CPreprocessor implements ILexerLog, IScanner {
buffer.append(":"); //$NON-NLS-1$
append(buffer, cal.get(Calendar.SECOND));
buffer.append("\""); //$NON-NLS-1$
return new ImageToken(IToken.tSTRING, 0, 0, buffer.toString().toCharArray());
return new ImageToken(IToken.tSTRING, null, 0, 0, buffer.toString().toCharArray());
}
};

final private DynamicStyleMacro __LINE__ = new DynamicStyleMacro("__LINE__".toCharArray()) { //$NON-NLS-1$
public Token execute() {
int lineNumber= fLocationMap.getCurrentLineNumber(fCurrentContext.currentLexerToken().getOffset());
return new ImageToken(IToken.tINTEGER, 0, 0, Long.toString(lineNumber).toCharArray());
return new ImageToken(IToken.tINTEGER, null, 0, 0, Long.toString(lineNumber).toCharArray());
}
};
@@ -216,7 +218,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
final String filePath= new String(reader.filename);
fAllIncludedFiles.add(filePath);
ILocationCtx ctx= fLocationMap.pushTranslationUnit(filePath, reader.buffer);
fRootLexer= new Lexer(reader.buffer, (LexerOptions) fLexOptions.clone(), this);
fRootLexer= new Lexer(reader.buffer, (LexerOptions) fLexOptions.clone(), this, this);
fRootContext= fCurrentContext= new ScannerContextFile(ctx, null, fRootLexer);
if (info instanceof IExtendedScannerInfo) {
final IExtendedScannerInfo einfo= (IExtendedScannerInfo) info;
@@ -319,13 +321,13 @@ public class CPreprocessor implements ILexerLog, IScanner {
if (preIncludedFiles != null && preIncludedFiles.length > 0) {
final char[] buffer= createSyntheticFile(preIncludedFiles);
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, false);
fCurrentContext= new ScannerContextFile(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this));
fCurrentContext= new ScannerContextFile(ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
}

if (macroFiles != null && macroFiles.length > 0) {
final char[] buffer= createSyntheticFile(macroFiles);
ILocationCtx ctx= fLocationMap.pushPreInclusion(buffer, 0, true);
fCurrentContext= new ScannerContextMacroFile(this, ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this));
fCurrentContext= new ScannerContextMacroFile(this, ctx, fCurrentContext, new Lexer(buffer, fLexOptions, this, this));
}
}
@@ -363,7 +365,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
// }

public PreprocessorMacro addMacroDefinition(char[] key, char[] value) {
final Lexer lex= new Lexer(key, fLexOptions, LEXERLOG_NULL);
final Lexer lex= new Lexer(key, fLexOptions, LEXERLOG_NULL, null);
try {
PreprocessorMacro result= fMacroDefinitionParser.parseMacroDefinition(lex, LEXERLOG_NULL, value);
fLocationMap.registerPredefinedMacro(result);
@@ -381,7 +383,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
}

public Map getDefinitions() {
CharArrayObjectMap objMap = getRealDefinitions();
final CharArrayObjectMap objMap= fMacroDictionary;
int size = objMap.size();
Map hashMap = new HashMap(size);
for (int i = 0; i < size; i++) {
@@ -391,10 +393,6 @@ public class CPreprocessor implements ILexerLog, IScanner {
return hashMap;
}

public CharArrayObjectMap getRealDefinitions() {
return fMacroDictionary;
}

public String[] getIncludePaths() {
return fIncludePaths;
}
@@ -434,7 +432,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
if (fContentAssistLimit < 0) {
throw new EndOfFileException();
}
t1= new SimpleToken(IToken.tEOC, fContentAssistLimit, fContentAssistLimit);
t1= new SimpleToken(IToken.tEOC, null, fContentAssistLimit, fContentAssistLimit);
break;
case IToken.tSTRING:
case IToken.tLSTRING:
@@ -472,7 +470,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
image[++off]= '"';
buf.getChars(0, buf.length(), image, ++off);
image[image.length-1]= '"';
t1= new ImageToken((isWide ? IToken.tLSTRING : IToken.tSTRING), t1.getOffset(), endOffset, image);
t1= new ImageToken((isWide ? IToken.tLSTRING : IToken.tSTRING), null, t1.getOffset(), endOffset, image);
}
}
@@ -1003,7 +1001,7 @@ public class CPreprocessor implements ILexerLog, IScanner {
reported= true;
fAllIncludedFiles.add(path);
ILocationCtx ctx= fLocationMap.pushInclusion(poundOffset, nameOffset, nameEndOffset, endOffset, reader.buffer, path, headerName, userInclude);
ScannerContextFile fctx= new ScannerContextFile(ctx, fCurrentContext, new Lexer(reader.buffer, fLexOptions, this));
ScannerContextFile fctx= new ScannerContextFile(ctx, fCurrentContext, new Lexer(reader.buffer, fLexOptions, this, this));
fCurrentContext= fctx;
}
}
@@ -1329,6 +1327,9 @@ public class CPreprocessor implements ILexerLog, IScanner {
}

// stuff to be removed
public CharArrayObjectMap getRealDefinitions() {
throw new UnsupportedOperationException();
}
public void addDefinition(IMacro macro) {
addMacroDefinition(macro.getSignature(), macro.getExpansion());
}
@@ -272,7 +272,7 @@ class ExpressionEvaluator {
private void consume() {
fTokens= (Token) fTokens.getNext();
if (fTokens == null) {
fTokens= new SimpleToken(Lexer.tEND_OF_INPUT, 0, 0);
fTokens= new SimpleToken(Lexer.tEND_OF_INPUT, null, 0, 0);
}
}
@@ -63,6 +63,7 @@ final public class Lexer {
// configuration
private final LexerOptions fOptions;
private final ILexerLog fLog;
private final Object fSource;

// the input to the lexer
private final char[] fInput;
@@ -84,20 +85,28 @@ final public class Lexer {
private boolean fFirstTokenAfterNewline= true;

public Lexer(char[] input, LexerOptions options, ILexerLog log) {
this(input, 0, input.length, options, log);
public Lexer(char[] input, LexerOptions options, ILexerLog log, Object source) {
this(input, 0, input.length, options, log, source);
}

public Lexer(char[] input, int start, int end, LexerOptions options, ILexerLog log) {
public Lexer(char[] input, int start, int end, LexerOptions options, ILexerLog log, Object source) {
fInput= input;
fStart= fOffset= fEndOffset= start;
fLimit= end;
fOptions= options;
fLog= log;
fToken= new SimpleToken(tBEFORE_INPUT, start, start);
fSource= source;
fToken= new SimpleToken(tBEFORE_INPUT, source, start, start);
nextCharPhase3();
}

/**
* Returns the source that is attached to the tokens generated by this lexer
*/
public Object getSource() {
return fSource;
}

/**
* Resets the lexer to the first char and prepares for content-assist mode.
*/
@@ -542,20 +551,20 @@ final public class Lexer {
}

private Token newToken(int kind, int offset) {
return new SimpleToken(kind, offset, fOffset);
return new SimpleToken(kind, fSource, offset, fOffset);
}

private Token newDigraphToken(int kind, int offset) {
return new DigraphToken(kind, offset, fOffset);
return new DigraphToken(kind, fSource, offset, fOffset);
}

private Token newToken(int kind, int offset, int imageLength) {
final int endOffset= fOffset;
int sourceLen= endOffset-offset;
if (sourceLen != imageLength) {
return new ImageToken(kind, offset, endOffset, getCharImage(offset, endOffset, imageLength));
return new ImageToken(kind, fSource, offset, endOffset, getCharImage(offset, endOffset, imageLength));
}
return new SourceImageToken(kind, offset, endOffset, fInput);
return new SourceImageToken(kind, fSource, offset, endOffset, fInput);
}

private void handleProblem(int problemID, char[] arg, int offset) {
@@ -212,7 +212,7 @@ class MacroDefinitionParser {
final char[] image = candidate.getCharImage();
int idx= CharArrayUtils.indexOf(image, paramList);
if (idx >= 0) {
candidate= new PlaceHolderToken(CPreprocessor.tMACRO_PARAMETER, idx, candidate.getOffset(), candidate.getEndOffset(), paramList[idx]);
candidate= new PlaceHolderToken(CPreprocessor.tMACRO_PARAMETER, idx, lexer.getSource(), candidate.getOffset(), candidate.getEndOffset(), paramList[idx]);
needParam= false;
}
else {
@@ -34,12 +34,12 @@ public class MacroExpander {
* operation across such boundaries.
*/
public static final class ExpansionBoundary extends Token {
private PreprocessorMacro fScope;
private boolean fIsStart;
private final PreprocessorMacro fMacro;

ExpansionBoundary(PreprocessorMacro scope, int offset, boolean isStart) {
super(CPreprocessor.tSCOPE_MARKER, offset, offset);
fScope= scope;
ExpansionBoundary(PreprocessorMacro scope, boolean isStart) {
super(CPreprocessor.tSCOPE_MARKER, null, 0, 0);
fMacro= scope;
fIsStart= isStart;
}
@ -48,29 +48,19 @@ public class MacroExpander {
|
|||
}
|
||||
|
||||
public String toString() {
|
||||
return "{" + (fIsStart ? '+' : '-') + //$NON-NLS-1$
|
||||
(fScope == null ? String.valueOf(getOffset()) : fScope.getName()) + '}';
|
||||
return "{" + (fIsStart ? '+' : '-') + fMacro.getName() + '}'; //$NON-NLS-1$
|
||||
}
|
||||
|
||||
public void execute(IdentityHashMap forbidden) {
|
||||
if (fIsStart) {
|
||||
forbidden.put(fScope, fScope);
|
||||
forbidden.put(fMacro, fMacro);
|
||||
}
|
||||
else {
|
||||
forbidden.remove(fScope);
|
||||
forbidden.remove(fMacro);
|
||||
}
|
||||
fScope= null;
|
||||
}
|
||||
|
||||
public Object clone() {
|
||||
// when cloned for the purpose of argument substitution, the boundaries no longer prevent a
|
||||
// recursive macro expansion.
|
||||
ExpansionBoundary t= (ExpansionBoundary) super.clone();
|
||||
t.fScope= null;
|
||||
return t;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Combines a list of tokens with the preprocessor to form the input for macro expansion.
|
||||
*/
|
||||
|
@ -78,7 +68,7 @@ public class MacroExpander {
|
|||
private boolean fUseCpp;
|
||||
|
||||
public TokenSource(boolean useCpp) {
|
||||
fUseCpp= true;
|
||||
fUseCpp= useCpp;
|
||||
}
|
||||
|
||||
public Token fetchFirst() throws OffsetLimitReachedException {
|
||||
|
@ -94,6 +84,7 @@ public class MacroExpander {
|
|||
Token t= first();
|
||||
while (t != null) {
|
||||
switch (t.getType()) {
|
||||
case CPreprocessor.tSPACE:
|
||||
case Lexer.tNEWLINE:
|
||||
break;
|
||||
case CPreprocessor.tSCOPE_MARKER:
|
||||
|
@ -146,7 +137,8 @@ public class MacroExpander {
|
|||
|
||||
// setup input sequence
|
||||
TokenSource input= new TokenSource(true);
|
||||
TokenList firstExpansion= expandOne(macro, forbidden, input, fStartOffset, fEndOffset);
|
||||
TokenList firstExpansion= new TokenList();
|
||||
expandOne(identifier, macro, forbidden, input, firstExpansion);
|
||||
input.prepend(firstExpansion);
|
||||
|
||||
expandAll(input, forbidden, expansion);
|
||||
|
@ -155,14 +147,14 @@ public class MacroExpander {
|
|||
|
||||
/**
|
||||
* Expects that the identifier of the macro expansion has been consumed.
|
||||
* Returns the last token of the expansion.
|
||||
*/
|
||||
private TokenList expandOne(PreprocessorMacro macro, IdentityHashMap forbidden, TokenSource input, int offset, int endOffset)
|
||||
private Token expandOne(Token lastConsumed, PreprocessorMacro macro, IdentityHashMap forbidden, TokenSource input, TokenList result)
|
||||
throws OffsetLimitReachedException {
|
||||
TokenList result= new TokenList();
|
||||
result.append(new ExpansionBoundary(macro, offset, true));
|
||||
result.append(new ExpansionBoundary(macro, true));
|
||||
if (macro.isFunctionStyle()) {
|
||||
final TokenSource[] argInputs= new TokenSource[macro.getParameterPlaceholderList().length];
|
||||
endOffset= parseArguments(input, (FunctionStyleMacro) macro, argInputs);
|
||||
lastConsumed= parseArguments(input, (FunctionStyleMacro) macro, forbidden, argInputs);
|
||||
TokenList[] clonedArgs= new TokenList[argInputs.length];
|
||||
TokenList[] expandedArgs= new TokenList[argInputs.length];
|
||||
for (int i = 0; i < argInputs.length; i++) {
|
||||
|
@ -177,17 +169,17 @@ public class MacroExpander {
|
|||
else {
|
||||
objStyleTokenPaste(macro, macro.getTokens(fDefinitionParser, fLexOptions), result);
|
||||
}
|
||||
result.append(new ExpansionBoundary(macro, endOffset, false));
|
||||
return result;
|
||||
result.append(new ExpansionBoundary(macro, false));
|
||||
return lastConsumed;
|
||||
}
|
||||
|
||||
private void expandAll(TokenSource input, IdentityHashMap forbidden, TokenList result) throws OffsetLimitReachedException {
|
||||
Token l= null;
|
||||
Token t= input.removeFirst();
|
||||
while(t != null) {
|
||||
switch(t.getType()) {
|
||||
case CPreprocessor.tSCOPE_MARKER:
|
||||
((ExpansionBoundary) t).execute(forbidden);
|
||||
result.append(t);
|
||||
break;
|
||||
case IToken.tIDENTIFIER:
|
||||
PreprocessorMacro macro= (PreprocessorMacro) fDictionary.get(t.getCharImage());
|
||||
|
@ -196,7 +188,15 @@ public class MacroExpander {
|
|||
if (!isFunctionStyle || input.findLParenthesis(forbidden)) {
|
||||
// mstodo- image location
|
||||
fImplicitMacroExpansions.add(fLocationMap.encounterImplicitMacroExpansion(macro, null));
|
||||
TokenList replacement= expandOne(macro, forbidden, input, t.getOffset(), t.getEndOffset());
|
||||
TokenList replacement= new TokenList();
|
||||
if (l != null && l.hasGap(t)) {
|
||||
replacement.append(space());
|
||||
}
|
||||
Token last= expandOne(t, macro, forbidden, input, replacement);
|
||||
Token n= input.first();
|
||||
if (n != null && last.hasGap(n)) {
|
||||
replacement.append(space());
|
||||
}
|
||||
input.prepend(replacement);
|
||||
t= null;
|
||||
}
|
||||
|
@ -210,31 +210,38 @@ public class MacroExpander {
|
|||
result.append(t);
|
||||
break;
|
||||
}
|
||||
l= t;
|
||||
t= input.removeFirst();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Expects that the identifier has been consumed.
|
||||
* @param forbidden
|
||||
* @throws OffsetLimitReachedException
|
||||
*/
|
||||
private int parseArguments(TokenSource input, FunctionStyleMacro macro, TokenSource[] result) throws OffsetLimitReachedException {
|
||||
private Token parseArguments(TokenSource input, FunctionStyleMacro macro, IdentityHashMap forbidden, TokenSource[] result) throws OffsetLimitReachedException {
|
||||
final int argCount= macro.getParameterPlaceholderList().length;
|
||||
final boolean hasVarargs= macro.hasVarArgs() != FunctionStyleMacro.NO_VAARGS;
|
||||
final int requiredArgs= hasVarargs ? argCount-1 : argCount;
|
||||
int endOffset= 0;
|
||||
int idx= 0;
|
||||
int nesting = -1;
|
||||
int nesting= 0;
|
||||
for (int i = 0; i < result.length; i++) {
|
||||
result[i]= new TokenSource(false);
|
||||
}
|
||||
|
||||
Token lastToken= input.fetchFirst();
|
||||
assert lastToken != null && lastToken.getType() == IToken.tLPAREN;
|
||||
|
||||
boolean complete= false;
|
||||
boolean isFirstOfArg= true;
|
||||
Token space= null;
|
||||
loop: while (true) {
|
||||
Token t= input.fetchFirst();
|
||||
if (t == null) {
|
||||
break loop;
|
||||
}
|
||||
endOffset= t.getEndOffset();
|
||||
lastToken= t;
|
||||
switch(t.getType()) {
|
||||
case Lexer.tEND_OF_INPUT:
|
||||
if (fCompletionMode) {
|
||||
|
@ -246,51 +253,63 @@ public class MacroExpander {
|
|||
|
||||
case Lexer.tNEWLINE:
|
||||
assert false; // we should not get any newlines from macros or the preprocessor.
|
||||
break;
|
||||
continue loop;
|
||||
|
||||
case IToken.tLPAREN:
|
||||
if (++nesting > 0) {
|
||||
result[idx].append(t);
|
||||
}
|
||||
++nesting;
|
||||
break;
|
||||
|
||||
case IToken.tRPAREN:
|
||||
if (--nesting < 0) {
|
||||
idx++;
|
||||
complete= true;
|
||||
break loop;
|
||||
}
|
||||
result[idx].append(t);
|
||||
break;
|
||||
|
||||
case IToken.tCOMMA:
|
||||
if (nesting == 0) {
|
||||
if (idx < argCount-1) { // next argument
|
||||
isFirstOfArg= true;
|
||||
space= null;
|
||||
idx++;
|
||||
break;
|
||||
continue loop;
|
||||
}
|
||||
else if (!hasVarargs) {
|
||||
// too many arguments
|
||||
handleProblem(IProblem.PREPROCESSOR_MACRO_USAGE_ERROR, macro.getNameCharArray());
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
// part of argument
|
||||
result[idx].append(t);
|
||||
break;
|
||||
|
||||
default:
|
||||
if (nesting < 0) {
|
||||
assert false; // no leading parenthesis, which is checked before the method is called.
|
||||
break loop;
|
||||
case CPreprocessor.tSCOPE_MARKER:
|
||||
if (argCount == 0) {
|
||||
((ExpansionBoundary) t).execute(forbidden);
|
||||
}
|
||||
result[idx].append(t);
|
||||
break;
|
||||
else {
|
||||
result[idx].append(t);
|
||||
}
|
||||
continue loop;
|
||||
|
||||
case CPreprocessor.tSPACE:
|
||||
if (!isFirstOfArg) {
|
||||
space= t;
|
||||
}
|
||||
continue loop;
|
||||
}
|
||||
if (argCount == 0) {
|
||||
break loop;
|
||||
}
|
||||
if (space != null) {
|
||||
result[idx].append(space);
|
||||
space= null;
|
||||
}
|
||||
result[idx].append(t);
|
||||
isFirstOfArg= false;
|
||||
}
|
||||
|
||||
if (idx < requiredArgs) {
|
||||
handleProblem(IProblem.PREPROCESSOR_MACRO_USAGE_ERROR, macro.getNameCharArray());
|
||||
}
|
||||
return endOffset;
|
||||
|
||||
if (!complete || idx+1 < requiredArgs) {
|
||||
handleProblem(IProblem.PREPROCESSOR_MACRO_USAGE_ERROR, macro.getNameCharArray());
|
||||
}
|
||||
return lastToken;
|
||||
}
|
||||
|
||||
private void handleProblem(int problemID, char[] arg) {
|
||||
|
@ -300,22 +319,32 @@ public class MacroExpander {
|
|||
private void replaceArgs(PreprocessorMacro macro, TokenList[] args, TokenList[] expandedArgs, TokenList result) {
|
||||
TokenList input= macro.getTokens(fDefinitionParser, fLexOptions);
|
||||
|
||||
Token l= null;
|
||||
Token n;
|
||||
Token pasteArg1= null;
|
||||
for (Token t= input.first(); t != null; t=n) {
|
||||
for (Token t= input.first(); t != null; l=t, t=n) {
|
||||
n= (Token) t.getNext();
|
||||
boolean pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
|
||||
|
||||
switch(t.getType()) {
|
||||
case CPreprocessor.tMACRO_PARAMETER:
|
||||
if (l != null && l.hasGap(t)) {
|
||||
result.append(space());
|
||||
}
|
||||
int idx= ((PlaceHolderToken) t).getIndex();
|
||||
if (idx < args.length) { // be defensive
|
||||
TokenList arg= pasteNext ? args[idx] : expandedArgs[idx];
|
||||
pasteArg1= cloneAndAppend(arg.first(), result, pasteNext);
|
||||
}
|
||||
if (n != null && t.hasGap(n)) {
|
||||
result.append(space());
|
||||
}
|
||||
break;
|
||||
|
||||
case IToken.tPOUND:
|
||||
if (l != null && l.hasGap(t)) {
|
||||
result.append(space());
|
||||
}
|
||||
StringBuffer buf= new StringBuffer();
|
||||
buf.append('"');
|
||||
if (n != null && n.getType() == CPreprocessor.tMACRO_PARAMETER) {
|
||||
|
@ -323,6 +352,7 @@ public class MacroExpander {
|
|||
if (idx < args.length) { // be defensive
|
||||
stringify(args[idx], buf);
|
||||
}
|
||||
t= n;
|
||||
n= (Token) n.getNext();
|
||||
pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
|
||||
}
|
||||
|
@ -331,7 +361,10 @@ public class MacroExpander {
|
|||
final char[] image= new char[length];
|
||||
buf.getChars(0, length, image, 0);
|
||||
|
||||
pasteArg1= appendToResult(new ImageToken(IToken.tSTRING, 0, 0, image), result, pasteNext);
|
||||
pasteArg1= appendToResult(new ImageToken(IToken.tSTRING, null, 0, 0, image), result, pasteNext);
|
||||
if (!pasteNext && n != null && t.hasGap(n)) {
|
||||
result.append(space());
|
||||
}
|
||||
break;
|
||||
|
||||
case IToken.tPOUNDPOUND:
|
||||
|
@ -347,22 +380,39 @@ public class MacroExpander {
|
|||
if (pasteArg2 != null) {
|
||||
rest= (Token) pasteArg2.getNext();
|
||||
}
|
||||
|
||||
// gcc-extension
|
||||
if (idx == args.length-1 && macro.hasVarArgs() != FunctionStyleMacro.NO_VAARGS) {
|
||||
if (pasteArg1.getType() == IToken.tCOMMA) {
|
||||
if (pasteArg2 == null) {
|
||||
pasteArg1= null;
|
||||
}
|
||||
else {
|
||||
pasteArg2.setNext(rest);
|
||||
rest= pasteArg2;
|
||||
pasteArg2= null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
pasteArg2= n;
|
||||
}
|
||||
t= n;
|
||||
n= (Token) n.getNext();
|
||||
pasteNext= n != null && n.getType() == IToken.tPOUNDPOUND;
|
||||
}
|
||||
|
||||
t= tokenpaste(pasteArg1, pasteArg2, macro);
|
||||
if (t != null) {
|
||||
pasteArg1= appendToResult((Token) t.clone(), result, pasteNext && rest == null);
|
||||
Token tp= tokenpaste(pasteArg1, pasteArg2, macro);
|
||||
if (tp != null) {
|
||||
pasteArg1= appendToResult((Token) tp.clone(), result, pasteNext && rest == null);
|
||||
}
|
||||
if (rest != null) {
|
||||
pasteArg1= cloneAndAppend(rest, result, pasteNext);
|
||||
}
|
||||
if (!pasteNext && n != null && t.hasGap(n)) {
|
||||
result.append(space());
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
|
@ -373,6 +423,10 @@ public class MacroExpander {
|
|||
}
|
||||
}
|
||||
|
||||
private SimpleToken space() {
|
||||
return new SimpleToken(CPreprocessor.tSPACE, null, 0, 0);
|
||||
}
|
||||
|
||||
private void objStyleTokenPaste(PreprocessorMacro macro, TokenList input, TokenList result) {
|
||||
Token n;
|
||||
Token pasteArg1= null;
|
||||
|
@ -414,27 +468,31 @@ public class MacroExpander {
|
|||
|
||||
private Token cloneAndAppend(Token tokens, TokenList result, boolean pasteNext) {
|
||||
Token t= tokens;
|
||||
Token r= t == null ? null : (Token) t.getNext();
|
||||
while (r != null) {
|
||||
if (t == null) {
|
||||
return null;
|
||||
}
|
||||
Token n= (Token) t.getNext();
|
||||
Token p= null;
|
||||
while (n != null) {
|
||||
result.append((Token) t.clone());
|
||||
t= r;
|
||||
r= (Token) r.getNext();
|
||||
p= t;
|
||||
t= n;
|
||||
n= (Token) n.getNext();
|
||||
}
|
||||
if (t != null && !pasteNext) {
|
||||
result.append((Token) t.clone());
|
||||
return null;
|
||||
}
|
||||
if (p != null && p.hasGap(t)) {
|
||||
result.append(space());
|
||||
}
|
||||
return t;
|
||||
}
|
||||
|
||||
private Token tokenpaste(Token arg1, Token arg2, PreprocessorMacro macro) {
|
||||
if (arg2 == null) {
|
||||
if (arg1.getType() == IToken.tCOMMA) { // gcc-extension for variadic macros
|
||||
return null;
|
||||
}
|
||||
return arg1;
|
||||
}
|
||||
|
||||
final char[] image1= arg1.getCharImage();
|
||||
final char[] image2= arg2.getCharImage();
|
||||
final int l1 = image1.length;
|
||||
|
@ -442,7 +500,7 @@ public class MacroExpander {
|
|||
final char[] image= new char[l1+l2];
|
||||
System.arraycopy(image1, 0, image, 0, l1);
|
||||
System.arraycopy(image2, 0, image, l1, l2);
|
||||
Lexer lex= new Lexer(image, fLexOptions, ILexerLog.NULL);
|
||||
Lexer lex= new Lexer(image, fLexOptions, ILexerLog.NULL, null);
|
||||
try {
|
||||
Token t1= lex.nextToken();
|
||||
Token t2= lex.nextToken();
|
||||
|
@ -461,17 +519,20 @@ public class MacroExpander {
|
|||
if (t == null) {
|
||||
return;
|
||||
}
|
||||
int endOffset= t.getOffset();
|
||||
for (; t != null; t= (Token) t.getNext()) {
|
||||
Token l= null;
|
||||
Token n;
|
||||
boolean space= false;
|
||||
for (; t != null; l=t, t= n) {
|
||||
n= (Token) t.getNext();
|
||||
if (!space && l != null && l.hasGap(t)) {
|
||||
buf.append(' ');
|
||||
space= true;
|
||||
}
|
||||
switch(t.getType()) {
|
||||
case IToken.tSTRING:
|
||||
case IToken.tLSTRING:
|
||||
case IToken.tCHAR:
|
||||
case IToken.tLCHAR:
|
||||
if (endOffset < t.getOffset()) {
|
||||
buf.append(' ');
|
||||
}
|
||||
endOffset= t.getEndOffset();
|
||||
final char[] image= t.getCharImage();
|
||||
for (int i = 0; i < image.length; i++) {
|
||||
final char c = image[i];
|
||||
|
@ -480,30 +541,21 @@ public class MacroExpander {
|
|||
}
|
||||
buf.append(c);
|
||||
}
|
||||
space= false;
|
||||
break;
|
||||
|
||||
case CPreprocessor.tSCOPE_MARKER:
|
||||
ExpansionBoundary sm= (ExpansionBoundary) t;
|
||||
if (sm.fIsStart) {
|
||||
if (endOffset < t.getOffset()) {
|
||||
buf.append(' ');
|
||||
}
|
||||
endOffset= Integer.MAX_VALUE;
|
||||
}
|
||||
else {
|
||||
endOffset= t.getEndOffset();
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
if (endOffset < t.getOffset()) {
|
||||
|
||||
case CPreprocessor.tSPACE:
|
||||
if (!space && l != null && n != null) {
|
||||
buf.append(' ');
|
||||
space= true;
|
||||
}
|
||||
endOffset= t.getEndOffset();
|
||||
break;
|
||||
|
||||
default:
|
||||
buf.append(t.getCharImage());
|
||||
space= false;
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -528,6 +580,7 @@ public class MacroExpander {
|
|||
t.setType(IToken.tIDENTIFIER);
|
||||
break;
|
||||
case CPreprocessor.tSCOPE_MARKER:
|
||||
case CPreprocessor.tSPACE:
|
||||
replacement.removeBehind(l);
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -62,6 +62,10 @@ abstract class PreprocessorMacro implements IMacroBinding {
|
|||
return null;
|
||||
}
|
||||
|
||||
public int hasVarArgs() {
|
||||
return FunctionStyleMacro.NO_VAARGS;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
char[][] p= getParameterList();
|
||||
if (p == null) {
|
||||
|
@ -118,10 +122,16 @@ class ObjectStyleMacro extends PreprocessorMacro {
|
|||
fEndOffset= endOffset;
|
||||
fExpansion= source;
|
||||
fExpansionTokens= expansion;
|
||||
if (expansion != null) {
|
||||
setSource(expansion.first());
|
||||
}
|
||||
}
|
||||
|
||||
public int findParameter(char[] tokenImage) {
|
||||
return -1;
|
||||
private void setSource(Token t) {
|
||||
while (t != null) {
|
||||
t.fSource= this;
|
||||
t= (Token) t.getNext();
|
||||
}
|
||||
}
|
||||
|
||||
public char[] getExpansion() {
|
||||
|
@ -158,7 +168,7 @@ class ObjectStyleMacro extends PreprocessorMacro {
|
|||
public TokenList getTokens(MacroDefinitionParser mdp, LexerOptions lexOptions) {
|
||||
if (fExpansionTokens == null) {
|
||||
fExpansionTokens= new TokenList();
|
||||
Lexer lex= new Lexer(fExpansion, fExpansionOffset, fEndOffset, lexOptions, ILexerLog.NULL);
|
||||
Lexer lex= new Lexer(fExpansion, fExpansionOffset, fEndOffset, lexOptions, ILexerLog.NULL, this);
|
||||
try {
|
||||
mdp.parseExpansion(lex, ILexerLog.NULL, getNameCharArray(), getParameterPlaceholderList(), fExpansionTokens);
|
||||
} catch (OffsetLimitReachedException e) {
|
||||
|
@ -259,16 +269,7 @@ class FunctionStyleMacro extends ObjectStyleMacro {
|
|||
public int hasVarArgs() {
|
||||
return fHasVarArgs;
|
||||
}
|
||||
|
||||
public int findParameter(final char[] identifier) {
|
||||
for (int i=0; i < fParamList.length; i++) {
|
||||
if (CharArrayUtils.equals(fParamList[i], identifier)) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
public boolean isFunctionStyle() {
|
||||
return true;
|
||||
}
|
||||
|
|
|
@@ -13,7 +13,7 @@ package org.eclipse.cdt.internal.core.parser.scanner;


public class ScannerContextMacroExpansion extends ScannerContext {
private static final Token END_TOKEN = new SimpleToken(Lexer.tEND_OF_INPUT, 0, 0);
private static final Token END_TOKEN = new SimpleToken(Lexer.tEND_OF_INPUT, null, 0, 0);

private Token fTokens;
@@ -70,7 +70,7 @@ public final class ScannerContextPPDirective extends ScannerContext {
private Token convertToken(Token t) {
switch (t.getType()) {
case Lexer.tNEWLINE:
t= new SimpleToken(Lexer.tEND_OF_INPUT, t.getEndOffset(), t.getEndOffset());
t= new SimpleToken(Lexer.tEND_OF_INPUT, null, t.getEndOffset(), t.getEndOffset());
break;
case IToken.tIDENTIFIER:
if (fConvertDefinedToken && CharArrayUtils.equals(Keywords.cDEFINED, t.getCharImage())) {
@@ -22,11 +22,13 @@ public abstract class Token implements IToken, Cloneable {
private int fOffset;
private int fEndOffset;
private IToken fNextToken;
Object fSource;

Token(int kind, int offset, int endOffset) {
Token(int kind, Object source, int offset, int endOffset) {
fKind= kind;
fOffset= offset;
fEndOffset= endOffset;
fSource= source;
}

public int getType() {
@@ -65,6 +67,9 @@ public abstract class Token implements IToken, Cloneable {

public abstract char[] getCharImage();

public boolean hasGap(Token t) {
return fSource == t.fSource && fEndOffset != t.getOffset();
}

public String toString() {
return getImage();
@@ -113,8 +118,8 @@ public abstract class Token implements IToken, Cloneable {
}

class SimpleToken extends Token {
public SimpleToken(int kind, int offset, int endOffset) {
super(kind, offset, endOffset);
public SimpleToken(int kind, Object source, int offset, int endOffset) {
super(kind, source, offset, endOffset);
}

public char[] getCharImage() {
@@ -125,8 +130,8 @@ class SimpleToken extends Token {
class PlaceHolderToken extends ImageToken {
private final int fIndex;

public PlaceHolderToken(int type, int idx, int offset, int endOffset, char[] name) {
super(type, offset, endOffset, name);
public PlaceHolderToken(int type, int idx, Object source, int offset, int endOffset, char[] name) {
super(type, source, offset, endOffset, name);
fIndex= idx;
}
@@ -140,8 +145,8 @@ class PlaceHolderToken extends ImageToken {
}

class DigraphToken extends Token {
public DigraphToken(int kind, int offset, int endOffset) {
super(kind, offset, endOffset);
public DigraphToken(int kind, Object source, int offset, int endOffset) {
super(kind, source, offset, endOffset);
}

public char[] getCharImage() {
@@ -152,8 +157,8 @@ class DigraphToken extends Token {
class ImageToken extends Token {
private char[] fImage;

public ImageToken(int kind, int offset, int endOffset, char[] image) {
super(kind, offset, endOffset);
public ImageToken(int kind, Object source, int offset, int endOffset, char[] image) {
super(kind, source, offset, endOffset);
fImage= image;
}
@@ -163,20 +168,19 @@ class ImageToken extends Token {
}

class SourceImageToken extends Token {

private char[] fSource;
private char[] fSourceImage;
private char[] fImage;

public SourceImageToken(int kind, int offset, int endOffset, char[] source) {
super(kind, offset, endOffset);
fSource= source;
public SourceImageToken(int kind, Object source, int offset, int endOffset, char[] sourceImage) {
super(kind, source, offset, endOffset);
fSourceImage= sourceImage;
}

public char[] getCharImage() {
if (fImage == null) {
final int length= getLength();
fImage= new char[length];
System.arraycopy(fSource, getOffset(), fImage, 0, length);
System.arraycopy(fSourceImage, getOffset(), fImage, 0, length);
}
return fImage;
}
@@ -52,7 +52,9 @@ class TokenList {
final public TokenList cloneTokens() {
TokenList result= new TokenList();
for (Token t= fFirst; t != null; t= (Token) t.getNext()) {
result.append((Token) t.clone());
if (t.getType() != CPreprocessor.tSCOPE_MARKER) {
result.append((Token) t.clone());
}
}
return result;
}
@@ -61,7 +63,7 @@ class TokenList {
return fFirst;
}

public void removeBehind(Token l) {
final void removeBehind(Token l) {
if (l == null) {
Token t= fFirst;
if (t != null) {
@@ -82,4 +84,14 @@ class TokenList {
}
}
}

void cutAfter(Token l) {
if (l == null) {
fFirst= fLast= null;
}
else {
l.setNext(null);
fLast= l;
}
}
}
@@ -13,10 +13,11 @@ package org.eclipse.cdt.internal.core.parser.scanner;
import org.eclipse.cdt.core.parser.IGCCToken;
import org.eclipse.cdt.core.parser.IToken;
import org.eclipse.cdt.core.parser.Keywords;
import org.eclipse.cdt.core.parser.util.CharArrayUtils;


public class TokenUtil {
private static final char[] IMAGE_EMPTY = new char[0];
private static final char[] SPACE = {' '};
private static final char[] IMAGE_POUND_POUND = "##".toCharArray(); //$NON-NLS-1$
private static final char[] IMAGE_POUND = "#".toCharArray(); //$NON-NLS-1$
@@ -122,8 +123,10 @@ public class TokenUtil {
case IGCCToken.tMIN: return Keywords.cpMIN;
case IGCCToken.tMAX: return Keywords.cpMAX;

case CPreprocessor.tSPACE: return SPACE;

default:
return IMAGE_EMPTY;
return CharArrayUtils.EMPTY;
}
}
@@ -138,7 +141,7 @@ public class TokenUtil {

default:
assert false: type;
return IMAGE_EMPTY;
return CharArrayUtils.EMPTY;
}
}
}