Mirror of https://github.com/eclipse-cdt/cdt, synced 2025-04-29 19:45:01 +02:00
Let the lexer work on an abstract class rather than on char[], bug 294430.
parent 7e5c056111
commit 14d210a876

9 changed files with 245 additions and 53 deletions
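
The shape of the change, as an illustrative sketch only (not taken from the commit): lexer input now flows through the new AbstractCharArray abstraction, and CharArray is the plain char[] wrapper, both added below. The snippet assumes the types from org.eclipse.cdt.internal.core.parser.scanner are imported and uses an arbitrary sample text.

// Hypothetical call site; types come from org.eclipse.cdt.internal.core.parser.scanner.
char[] code = "#define ONE 1\n".toCharArray();

// The existing char[] constructor keeps working; it now wraps its input in a CharArray internally.
Lexer byArray = new Lexer(code, new LexerOptions(), ILexerLog.NULL, null);

// Construction over the abstraction itself, mirroring MacroDefinitionParser.getExpansion below.
AbstractCharArray input = new CharArray(code);
Lexer byAbstraction = new Lexer(input, 0, code.length, new LexerOptions(), ILexerLog.NULL, null);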

PreprocessorSpeedTest:

@@ -41,7 +41,7 @@ public class PreprocessorSpeedTest {
 			if (args.length > 0)
 				stream = new PrintStream(new FileOutputStream(args[0]));
 
-			new PreprocessorSpeedTest().runTest(stream, 30);
+			new PreprocessorSpeedTest().runTest(stream, 200);
 		} catch (Exception e) {
 			System.out.println(e);
 		}
@@ -122,10 +122,109 @@ public class PreprocessorSpeedTest {
 			return msvc98ScannerInfo();
 		else if (config.equals("ydl"))
 			return ydlScannerInfo();
+		else if (config.equals("cygwin"))
+			return cygwinScannerInfo();
 		else
 			return mingwScannerInfo();
 	}
 
+	private IScannerInfo cygwinScannerInfo() {
+		// TODO It would be easier and more flexible if we used discovery for this
+		Map definitions = new Hashtable();
+		definitions.put("i386", "1");
+		definitions.put("unix", "1");
+		definitions.put("_cdecl", "__attribute__((__cdecl__))");
+		definitions.put("_fastcall", "__attribute__((__fastcall__))");
+		definitions.put("_stdcall", "__attribute__((__stdcall__))");
+		definitions.put("_X86_", "1");
+		definitions.put("__CHAR_BIT__", "8");
+		definitions.put("__cplusplus", "1");
+		definitions.put("__CYGWIN32__", "1");
+		definitions.put("__CYGWIN__", "1");
+		definitions.put("__DBL_DENORM_MIN__", "4.9406564584124654e-324");
+		definitions.put("__DBL_DIG__", "15");
+		definitions.put("__DBL_EPSILON__", "2.2204460492503131e-16");
+		definitions.put("__DBL_HAS_INFINITY__", "1");
+		definitions.put("__DBL_HAS_QUIET_NAN__", "1");
+		definitions.put("__DBL_MANT_DIG__", "53");
+		definitions.put("__DBL_MAX_10_EXP__", "308");
+		definitions.put("__DBL_MAX_EXP__", "1024");
+		definitions.put("__DBL_MAX__", "1.7976931348623157e+308");
+		definitions.put("__DBL_MIN_10_EXP__", "(-307)");
+		definitions.put("__DBL_MIN_EXP__", "(-1021)");
+		definitions.put("__DBL_MIN__", "2.2250738585072014e-308");
+		definitions.put("__DECIMAL_DIG__", "21");
+		definitions.put("__declspec(x)", "__attribute__((x))");
+		definitions.put("__DEPRECATED", "1");
+		definitions.put("__EXCEPTIONS", "1");
+		definitions.put("__fastcall", "__attribute__((__fastcall__))");
+		definitions.put("__FINITE_MATH_ONLY__", "0");
+		definitions.put("__FLT_DENORM_MIN__", "1.40129846e-45F");
+		definitions.put("__FLT_DIG__", "6");
+		definitions.put("__FLT_EPSILON__", "1.19209290e-7F");
+		definitions.put("__FLT_EVAL_METHOD__", "2");
+		definitions.put("__FLT_HAS_INFINITY__", "1");
+		definitions.put("__FLT_HAS_QUIET_NAN__", "1");
+		definitions.put("__FLT_MANT_DIG__", "24");
+		definitions.put("__FLT_MAX_10_EXP__", "38");
+		definitions.put("__FLT_MAX_EXP__", "128");
+		definitions.put("__FLT_MAX__", "3.40282347e+38F");
+		definitions.put("__FLT_MIN_10_EXP__", "(-37)");
+		definitions.put("__FLT_MIN_EXP__", "(-125)");
+		definitions.put("__FLT_MIN__", "1.17549435e-38F");
+		definitions.put("__FLT_RADIX__", "2");
+		definitions.put("__GNUC_MINOR__", "4");
+		definitions.put("__GNUC_PATCHLEVEL__", "4");
+		definitions.put("__GNUC__", "3");
+		definitions.put("__GNUG__", "3");
+		definitions.put("__GXX_ABI_VERSION", "1002");
+		definitions.put("__GXX_WEAK__", "1");
+		definitions.put("__i386", "1");
+		definitions.put("__i386__", "1");
+		definitions.put("__INT_MAX__", "2147483647");
+		definitions.put("__LDBL_DENORM_MIN__", "3.64519953188247460253e-4951L");
+		definitions.put("__LDBL_DIG__", "18");
+		definitions.put("__LDBL_EPSILON__", "1.08420217248550443401e-19L");
+		definitions.put("__LDBL_HAS_INFINITY__", "1");
+		definitions.put("__LDBL_HAS_QUIET_NAN__", "1");
+		definitions.put("__LDBL_MANT_DIG__", "64");
+		definitions.put("__LDBL_MAX_10_EXP__", "4932");
+		definitions.put("__LDBL_MAX_EXP__", "16384");
+		definitions.put("__LDBL_MAX__", "1.18973149535723176502e+4932L");
+		definitions.put("__LDBL_MIN_10_EXP__", "(-4931)");
+		definitions.put("__LDBL_MIN_EXP__", "(-16381)");
+		definitions.put("__LDBL_MIN__", "3.36210314311209350626e-4932L");
+		definitions.put("__LONG_LONG_MAX__", "9223372036854775807LL");
+		definitions.put("__LONG_MAX__", "2147483647L");
+		definitions.put("__NO_INLINE__", "1");
+		definitions.put("__PTRDIFF_TYPE__", "int");
+		definitions.put("__REGISTER_PREFIX__", "");
+		definitions.put("__SCHAR_MAX__", "127");
+		definitions.put("__SHRT_MAX__", "32767");
+		definitions.put("__SIZE_TYPE__", "unsigned int");
+		definitions.put("__stdcall", "__attribute__((__stdcall__))");
+		definitions.put("__STDC_HOSTED__", "1");
+		definitions.put("__tune_i686__", "1");
+		definitions.put("__tune_pentiumpro__", "1");
+		definitions.put("__unix", "1");
+		definitions.put("__unix__", "1");
+		definitions.put("__USER_LABEL_PREFIX__", "_");
+		definitions.put("__USING_SJLJ_EXCEPTIONS__", "1");
+		definitions.put("__VERSION__", "\"3.4.4 (cygming special, gdc 0.12, using dmd 0.125)\"");
+		definitions.put("__WCHAR_MAX__", "65535U");
+		definitions.put("__WCHAR_TYPE__", "short unsigned int");
+		definitions.put("__WCHAR_UNSIGNED__", "1");
+		definitions.put("__WINT_TYPE__", "unsigned int");
+
+		String[] includePaths = new String[] { "C:/programs/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include/c++",
+				"C:/programs/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include/c++/i686-pc-cygwin",
+				"C:/programs/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include/c++/backward",
+				"C:/programs/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include", "C:/programs/cygwin/usr/include",
+				"C:/programs/cygwin/usr/include/w32api" };
+
+		return new ScannerInfo(definitions, includePaths);
+	}
+
 	private IScannerInfo msvcScannerInfo() {
 		Map definitions = new Hashtable();
 		//definitions.put( "__GNUC__", "3" ); //$NON-NLS-1$ //$NON-NLS-2$
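
The new cygwinScannerInfo() above follows the same pattern as the other configurations in this test: a map of predefined macros plus include paths handed to ScannerInfo. A minimal hedged sketch of that pattern (the values here are made up, and the imports from the CDT parser API are assumed):

// Hypothetical minimal configuration; only the ScannerInfo(Map, String[]) pattern is taken from the test above.
Map definitions = new Hashtable();
definitions.put("__GNUC__", "3");
String[] includePaths = new String[] { "/usr/include" };
IScannerInfo info = new ScannerInfo(definitions, includePaths);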

AbstractCharArray (new file):

@@ -0,0 +1,40 @@
+/*******************************************************************************
+ * Copyright (c) 2009 Wind River Systems, Inc. and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ *     Markus Schorn - initial API and implementation
+ *******************************************************************************/
+package org.eclipse.cdt.internal.core.parser.scanner;
+
+/**
+ * Abstract class for providing input to the lexer.
+ */
+public abstract class AbstractCharArray {
+
+	/**
+	 * Checks whether the given offset is valid for this array. Subclasses may assume
+	 * that offset is non-negative.
+	 */
+	public abstract boolean isValidOffset(int offset);
+
+	/**
+	 * Returns the limit for valid offsets or -1 if it is unknown. All offsets below
+	 * the given limit are guaranteed to be valid.
+	 */
+	public abstract int getLimit();
+
+	/**
+	 * Returns the character at the given position, subclasses do not have to do range checks.
+	 */
+	public abstract char get(int offset);
+
+	/**
+	 * Copy a range of characters to the given destination. Subclasses do not have to do any
+	 * range checks.
+	 */
+	public abstract void arraycopy(int offset, char[] destination, int destinationPos, int length);
+}
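
The javadoc above allows getLimit() to return -1 when the total length is not known. The char[]-backed wrapper in the next file never needs that, so here is a purely hypothetical subclass, not part of this commit (the class name ReaderCharArray and the use of java.io.Reader are assumptions), showing the kind of input the abstraction makes possible: characters pulled lazily from a Reader, with the limit unknown until the end of input has been reached.

import java.io.IOException;
import java.io.Reader;

import org.eclipse.cdt.internal.core.parser.scanner.AbstractCharArray;

/** Hypothetical example only: lexer input of initially unknown length, buffered on demand. */
public class ReaderCharArray extends AbstractCharArray {
	private final Reader fReader;
	private final StringBuilder fBuffer = new StringBuilder();
	private boolean fDone;

	public ReaderCharArray(Reader reader) {
		fReader = reader;
	}

	// Pull characters from the reader until 'offset' is buffered or the end of input is hit.
	private boolean ensure(int offset) {
		try {
			while (!fDone && fBuffer.length() <= offset) {
				int c = fReader.read();
				if (c < 0) {
					fDone = true;
				} else {
					fBuffer.append((char) c);
				}
			}
		} catch (IOException e) {
			fDone = true;
		}
		return offset < fBuffer.length();
	}

	@Override
	public boolean isValidOffset(int offset) {
		return ensure(offset);
	}

	@Override
	public int getLimit() {
		return fDone ? fBuffer.length() : -1; // unknown until the reader is exhausted
	}

	@Override
	public char get(int offset) {
		return fBuffer.charAt(offset); // callers check isValidOffset() first, per the contract above
	}

	@Override
	public void arraycopy(int offset, char[] destination, int destinationPos, int length) {
		// The requested range has already been validated via isValidOffset(), so it is buffered.
		fBuffer.getChars(offset, offset + length, destination, destinationPos);
	}
}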

CharArray (new file):

@@ -0,0 +1,44 @@
+/*******************************************************************************
+ * Copyright (c) 2009 Wind River Systems, Inc. and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ *     Markus Schorn - initial API and implementation
+ *******************************************************************************/
+package org.eclipse.cdt.internal.core.parser.scanner;
+
+/**
+ * Wrapper around char[] to implement {@link AbstractCharArray}.
+ */
+public final class CharArray extends AbstractCharArray {
+
+	private final char[] fArray;
+
+	public CharArray(char[] array) {
+		fArray= array;
+	}
+
+	@Override
+	public int getLimit() {
+		return fArray.length;
+	}
+
+	@Override
+	public char get(int pos) {
+		return fArray[pos];
+	}
+
+	@Override
+	public void arraycopy(int offset, char[] destination, int destPos, int length) {
+		System.arraycopy(fArray, offset, destination, destPos, length);
+
+	}
+
+	@Override
+	public boolean isValidOffset(int offset) {
+		return offset < fArray.length;
+	}
+}
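
A quick usage sketch for the wrapper (values made up); since the limit is simply the array length, isValidOffset() reduces to a bounds check:

char[] data = "int x;".toCharArray();
CharArray wrapped = new CharArray(data);
// getLimit() is the exact array length.
assert wrapped.getLimit() == data.length;
assert wrapped.get(4) == 'x';
// Copy the two characters "x;" starting at offset 4.
char[] tail = new char[2];
wrapped.arraycopy(4, tail, 0, 2);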

Lexer:

@@ -71,7 +71,7 @@ final public class Lexer implements ITokenSequence {
 	private final Object fSource;
 
 	// the input to the lexer
-	private final char[] fInput;
+	private final AbstractCharArray fInput;
 	private int fStart;
 	private int fLimit;
 
@@ -89,12 +89,11 @@ final public class Lexer implements ITokenSequence {
 	private int fMarkEndOffset;
 	private int fMarkPrefetchedChar;
 
-
 	public Lexer(char[] input, LexerOptions options, ILexerLog log, Object source) {
-		this(input, 0, input.length, options, log, source);
+		this(new CharArray(input), 0, input.length, options, log, source);
 	}
 
-	public Lexer(char[] input, int start, int end, LexerOptions options, ILexerLog log, Object source) {
+	public Lexer(AbstractCharArray input, int start, int end, LexerOptions options, ILexerLog log, Object source) {
 		fInput= input;
 		fStart= fOffset= fEndOffset= start;
 		fLimit= end;
@@ -105,6 +104,13 @@ final public class Lexer implements ITokenSequence {
 		nextCharPhase3();
 	}
 
+	private boolean isValidOffset(int pos) {
+		if (fLimit < 0)
+			return fInput.isValidOffset(pos);
+
+		return pos < fLimit;
+	}
+
 	/**
 	 * Returns the source that is attached to the tokens generated by this lexer
 	 */
@@ -117,7 +123,9 @@ final public class Lexer implements ITokenSequence {
 	 */
 	public void setContentAssistMode(int offset) {
 		fSupportContentAssist= true;
-		fLimit= Math.min(offset, fInput.length);
+		if (isValidOffset(offset)) {
+			fLimit= offset;
+		}
 		// re-initialize
 		fOffset= fEndOffset= fStart;
 		nextCharPhase3();
@@ -214,17 +222,17 @@ final public class Lexer implements ITokenSequence {
 		// optimization avoids calling nextCharPhase3
 		int d;
 		final int pos= fEndOffset;
-		if (pos+1 >= fLimit) {
+		if (!isValidOffset(pos+1)) {
 			d= nextCharPhase3();
 		}
 		else {
-			d= fInput[pos];
+			d= fInput.get(pos);
 			switch(d) {
 			case '\\':
 				d= nextCharPhase3();
 				break;
 			case '?':
-				if (fInput[pos+1] == '?') {
+				if (fInput.get(pos+1) == '?') {
 					d= nextCharPhase3();
 					break;
 				}
@@ -671,7 +679,7 @@ final public class Lexer implements ITokenSequence {
 		}
 		else {
 			image= new char[imageLength];
-			System.arraycopy(fInput, offset, image, 0, imageLength);
+			fInput.arraycopy(offset, image, 0, imageLength);
 		}
 		return new TokenWithImage(kind, fSource, offset, endOffset, image);
 	}
@@ -713,8 +721,8 @@ final public class Lexer implements ITokenSequence {
 	private void blockComment(final int start, final char trigger) {
 		// we can ignore line-splices, trigraphs and windows newlines when searching for the '*'
 		int pos= fEndOffset;
-		while(pos < fLimit) {
-			if (fInput[pos++] == trigger) {
+		while(isValidOffset(pos)) {
+			if (fInput.get(pos++) == trigger) {
 				fEndOffset= pos;
 				if (nextCharPhase3() == '/') {
 					nextCharPhase3();
@@ -998,27 +1006,27 @@ final public class Lexer implements ITokenSequence {
 	private int nextCharPhase3() {
 		int pos= fEndOffset;
 		do {
-			if (pos+1 >= fLimit) {
-				if (pos >= fLimit) {
-					fOffset= fLimit;
-					fEndOffset= fLimit;
+			if (!isValidOffset(pos+1)) {
+				if (!isValidOffset(pos)) {
+					fOffset= pos;
+					fEndOffset= pos;
 					fCharPhase3= END_OF_INPUT;
 					return END_OF_INPUT;
 				}
 				fOffset= pos;
 				fEndOffset= pos+1;
-				fCharPhase3= fInput[pos];
+				fCharPhase3= fInput.get(pos);
 				return fCharPhase3;
 			}
 
-			final char c= fInput[pos];
+			final char c= fInput.get(pos);
 			fOffset= pos;
 			fEndOffset= ++pos;
 			fCharPhase3= c;
 			switch(c) {
 			// windows line-ending
 			case '\r':
-				if (fInput[pos] == '\n') {
+				if (fInput.get(pos) == '\n') {
 					fEndOffset= pos+1;
 					fCharPhase3= '\n';
 					return '\n';
@@ -1027,10 +1035,10 @@ final public class Lexer implements ITokenSequence {
 
 			// trigraph sequences
 			case '?':
-				if (fInput[pos] != '?' || pos+1 >= fLimit) {
+				if (fInput.get(pos) != '?' || !isValidOffset(pos+1)) {
 					return c;
 				}
-				final char trigraph= checkTrigraph(fInput[pos+1]);
+				final char trigraph= checkTrigraph(fInput.get(pos+1));
 				if (trigraph == 0) {
 					return c;
 				}
@@ -1086,8 +1094,8 @@ final public class Lexer implements ITokenSequence {
 	private int findEndOfLineSpliceSequence(int pos) {
 		boolean haveBackslash= true;
 		int result= -1;
-		loop: while(pos < fLimit) {
-			switch(fInput[pos++]) {
+		loop: while(isValidOffset(pos)) {
+			switch(fInput.get(pos++)) {
 			case '\n':
 				if (haveBackslash) {
 					result= pos;
@@ -1103,7 +1111,7 @@ final public class Lexer implements ITokenSequence {
 				return result;
 
 			case '?':
-				if (pos+1 >= fLimit || fInput[pos] != '?' || fInput[++pos] != '/') {
+				if (!isValidOffset(pos+1) || fInput.get(pos) != '?' || fInput.get(++pos) != '/') {
 					return result;
 				}
 				// fall through to backslash handling
@@ -1128,11 +1136,11 @@ final public class Lexer implements ITokenSequence {
 	public char[] getInputChars(int offset, int endOffset) {
 		final int length= endOffset-offset;
 		final char[] result= new char[length];
-		System.arraycopy(fInput, offset, result, 0, length);
+		fInput.arraycopy(offset, result, 0, length);
 		return result;
 	}
 
-	char[] getInput() {
+	AbstractCharArray getInput() {
 		return fInput;
 	}
 

MacroDefinitionParser:

@@ -1,5 +1,5 @@
 /*******************************************************************************
- * Copyright (c) 2007, 2008 Wind River Systems, Inc. and others.
+ * Copyright (c) 2007, 2009 Wind River Systems, Inc. and others.
  * All rights reserved. This program and the accompanying materials
  * are made available under the terms of the Eclipse Public License v1.0
  * which accompanies this distribution, and is available at
@@ -61,7 +61,7 @@ public class MacroDefinitionParser {
 		}
 	}
 
-	public static char[] getExpansion(char[] expansionImage, int offset, int endOffset) {
+	public static char[] getExpansion(AbstractCharArray expansionImage, int offset, int endOffset) {
 		TokenList tl= new TokenList();
 		Lexer lex= new Lexer(expansionImage, offset, endOffset, new LexerOptions(), ILexerLog.NULL, null);
 		try {
@@ -111,7 +111,7 @@ public class MacroDefinitionParser {
 	public ObjectStyleMacro parseMacroDefinition(final Lexer lexer, final ILexerLog log)
 			throws OffsetLimitReachedException, InvalidMacroDefinitionException {
 		final Token name = parseName(lexer);
-		final char[] source= lexer.getInput();
+		final AbstractCharArray source= lexer.getInput();
 		final char[] nameChars= name.getCharImage();
 		final char[][] paramList= parseParamList(lexer, name);
 		final TokenList replacement= new TokenList();
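
A hedged call-site sketch for the changed getExpansion() signature (the macro text is arbitrary); the PDOMMacro hunk at the end of this commit uses exactly this pattern:

char[] image = "A + B".toCharArray();
char[] expansion = MacroDefinitionParser.getExpansion(new CharArray(image), 0, image.length);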

MacroExpander:

@@ -197,7 +197,7 @@ public class MacroExpander {
 		Lexer lexer= new Lexer(fFixedInput, fLexOptions, fLog, this);
 
 		try {
-			tracker.start(lexer);
+			tracker.start(fFixedInput);
 			Token identifier= lexer.nextToken();
 			if (identifier.getType() != IToken.tIDENTIFIER) {
 				tracker.fail();
@@ -252,8 +252,7 @@
 			lastConsumed= parseArguments(input, (FunctionStyleMacro) macro, forbidden, argInputs, tracker);
 		} catch (AbortMacroExpansionException e) {
 			// ignore this macro expansion
-			for (int i = 0; i < argInputs.length; i++) {
-				TokenSource argInput= argInputs[i];
+			for (TokenSource argInput : argInputs) {
 				executeScopeMarkers(argInput, forbidden);
 				if (tracker != null) {
 					tracker.setExpandedMacroArgument(null);

MacroExpansionTracker:

@@ -1,5 +1,5 @@
 /*******************************************************************************
- * Copyright (c) 2008 Wind River Systems, Inc. and others.
+ * Copyright (c) 2008, 2009 Wind River Systems, Inc. and others.
  * All rights reserved. This program and the accompanying materials
  * are made available under the terms of the Eclipse Public License v1.0
  * which accompanies this distribution, and is available at
@@ -41,7 +41,7 @@ public class MacroExpansionTracker {
 	private ReplaceEdit fReplacement;
 	private IMacroBinding fMacroDefinition;
 
-	private Lexer fLexer;
+	private char[] fInput;
 	private String fReplacementText= ""; //$NON-NLS-1$
 	private LinkedList<MacroInfo> fMacroStack= new LinkedList<MacroInfo>();
 
@@ -97,8 +97,8 @@ public class MacroExpansionTracker {
 	/**
 	 * Informs the tracker that macro expansion is started.
 	 */
-	void start(Lexer lexer) {
-		fLexer= lexer;
+	void start(char[] input) {
+		fInput= input;
 	}
 
 	/**
@@ -108,7 +108,7 @@ public class MacroExpansionTracker {
 	 * @param endOffset the end offset of the input that was read from the lexer.
 	 */
 	void finish(TokenList result, int endOffset) {
-		final char[] lexInput = fLexer.getInput();
+		final char[] lexInput = fInput;
 		if (!isDone()) {
 			// special case we compute the entire expansion as one step, the result contains the
 			// expanded text
@@ -138,7 +138,7 @@ public class MacroExpansionTracker {
 	 * There was no macro at the beginning of the input.
 	 */
 	void fail() {
-		fPreStep= new String(fLexer.getInput());
+		fPreStep= new String(fInput);
 		fReplacement= new ReplaceEdit(0, 0, ""); //$NON-NLS-1$
 	}
 
@@ -229,7 +229,7 @@ public class MacroExpansionTracker {
 		appendFunctionStyleMacro(result);
 		fReplaceTo= result.last();
 		StringBuilder buf= new StringBuilder();
-		toString(replacement, fLexer.getInput(), buf, buf, buf);
+		toString(replacement, fInput, buf, buf, buf);
 		fReplacementText= buf.toString();
 	}
 
@@ -329,7 +329,7 @@ public class MacroExpansionTracker {
 		fReplaceFrom= fReplaceTo= identifier;
 		result.append(identifier);
 		StringBuilder buf= new StringBuilder();
-		toString(replacement, fLexer.getInput(), buf, buf, buf);
+		toString(replacement, fInput, buf, buf, buf);
 		fReplacementText= buf.toString();
 	}
 

PreprocessorMacro (ObjectStyleMacro, FunctionStyleMacro):

@@ -1,5 +1,5 @@
 /*******************************************************************************
- * Copyright (c) 2007, 2008 Wind River Systems, Inc. and others.
+ * Copyright (c) 2007, 2009 Wind River Systems, Inc. and others.
  * All rights reserved. This program and the accompanying materials
  * are made available under the terms of the Eclipse Public License v1.0
  * which accompanies this distribution, and is available at
@@ -101,16 +101,17 @@ abstract class PreprocessorMacro implements IMacroBinding {
 }
 
 class ObjectStyleMacro extends PreprocessorMacro {
-	private final char[] fExpansion;
+	private final AbstractCharArray fExpansion;
 	final int fExpansionOffset;
 	final int fEndOffset;
 	private TokenList fExpansionTokens;
 
+
 	public ObjectStyleMacro(char[] name, char[] expansion) {
-		this(name, 0, expansion.length, null, expansion);
+		this(name, 0, expansion.length, null, new CharArray(expansion));
 	}
 
-	public ObjectStyleMacro(char[] name, int expansionOffset, int endOffset, TokenList expansion, char[] source) {
+	public ObjectStyleMacro(char[] name, int expansionOffset, int endOffset, TokenList expansion, AbstractCharArray source) {
 		super(name);
 		fExpansionOffset= expansionOffset;
 		fEndOffset= endOffset;
@@ -144,11 +145,8 @@ class ObjectStyleMacro extends PreprocessorMacro {
 
 	public char[] getExpansionImage() {
 		final int length = fEndOffset - fExpansionOffset;
-		if (length == fExpansion.length) {
-			return fExpansion;
-		}
 		char[] result= new char[length];
-		System.arraycopy(fExpansion, fExpansionOffset, result, 0, length);
+		fExpansion.arraycopy(fExpansionOffset, result, 0, length);
 		return result;
 	}
 
@@ -182,13 +180,16 @@ class FunctionStyleMacro extends ObjectStyleMacro {
 	private char[] fSignature;
 
 	public FunctionStyleMacro(char[] name, char[][] paramList, int hasVarArgs, char[] expansion) {
-		super(name, expansion);
-		fParamList = paramList;
-		fHasVarArgs= hasVarArgs;
+		this(name, paramList, hasVarArgs, 0, expansion.length, null, new CharArray(expansion));
+	}
+
+	public FunctionStyleMacro(char[] name, char[][] paramList, int hasVarArgs, AbstractCharArray expansion,
+			int expansionOffset, int expansionEndOffset) {
+		this(name, paramList, hasVarArgs, expansionOffset, expansionEndOffset, null, expansion);
 	}
 
 	public FunctionStyleMacro(char[] name, char[][] paramList, int hasVarArgs, int expansionFileOffset, int endFileOffset,
-			TokenList expansion, char[] source) {
+			TokenList expansion, AbstractCharArray source) {
 		super(name, expansionFileOffset, endFileOffset, expansion, source);
 		fParamList = paramList;
 		fHasVarArgs= hasVarArgs;
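
For illustration only, with made-up macro names: the char[] convenience constructors keep their signatures and wrap the expansion in a CharArray internally, so existing callers are untouched. The snippet assumes it lives in the scanner package, since these classes are not public, and that a hasVarArgs value of 0 means no variable arguments.

// Object-style macro, as for "#define ONE 1".
ObjectStyleMacro one = new ObjectStyleMacro("ONE".toCharArray(), "1".toCharArray());
char[] image = one.getExpansionImage(); // always a fresh copy now that fExpansion is no longer a char[]

// Function-style macro, as for "#define ID(x) x" (hasVarArgs of 0 assumed to mean no varargs).
FunctionStyleMacro id = new FunctionStyleMacro("ID".toCharArray(),
		new char[][] { "x".toCharArray() }, 0, "x".toCharArray());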

PDOMMacro:

@@ -32,6 +32,7 @@ import org.eclipse.cdt.internal.core.index.IIndexFragment;
 import org.eclipse.cdt.internal.core.index.IIndexFragmentBinding;
 import org.eclipse.cdt.internal.core.index.IIndexFragmentName;
 import org.eclipse.cdt.internal.core.index.IIndexScope;
+import org.eclipse.cdt.internal.core.parser.scanner.CharArray;
 import org.eclipse.cdt.internal.core.parser.scanner.MacroDefinitionParser;
 import org.eclipse.cdt.internal.core.pdom.PDOM;
 import org.eclipse.cdt.internal.core.pdom.db.Database;
@@ -321,7 +322,7 @@ public class PDOMMacro implements IIndexMacro, IPDOMBinding, IASTFileLocation {
 
 	public char[] getExpansion() {
 		char[] expansionImage= getExpansionImage();
-		return MacroDefinitionParser.getExpansion(expansionImage, 0, expansionImage.length);
+		return MacroDefinitionParser.getExpansion(new CharArray(expansionImage), 0, expansionImage.length);
 	}
 
 	public char[][] getParameterPlaceholderList() {