
Improved indexer-tracing, logging info-line after rebuild index, bug 213561.

Markus Schorn 2008-02-19 12:22:11 +00:00
parent e9e1c3b1af
commit 74593bcfb3
16 changed files with 272 additions and 122 deletions

View file

@@ -54,9 +54,11 @@ public class BaseTestCase extends TestCase {
super(name);
}
@Override
protected void setUp() throws Exception {
}
@Override
protected void tearDown() throws Exception {
TestScannerProvider.clear();
}
@@ -113,11 +115,12 @@ public class BaseTestCase extends TestCase {
}
}
@Override
public void runBare() throws Throwable {
final List statusLog= Collections.synchronizedList(new ArrayList());
ILogListener logListener= new ILogListener() {
public void logging(IStatus status, String plugin) {
if(!status.isOK()) {
if(!status.isOK() && status.getSeverity() != IStatus.INFO) {
statusLog.add(status);
}
}
@@ -163,6 +166,7 @@
throw testThrowable;
}
@Override
public void run( TestResult result ) {
if (!fExpectFailure || "true".equals(System.getProperty("SHOW_EXPECTED_FAILURES"))) {
super.run(result);

View file

@@ -123,4 +123,9 @@ public interface IWritableIndex extends IIndex {
* Flushes all caches to the disk.
*/
void flush() throws CoreException;
/**
* Returns the size of the database in bytes.
*/
long getDatabaseSizeBytes();
}

View file

@@ -80,4 +80,9 @@ public interface IWritableIndexFragment extends IIndexFragment {
* Flushes caches to disk.
*/
void flush() throws CoreException;
/**
* @return the size of the database in bytes
*/
long getDatabaseSizeBytes();
}

View file

@@ -96,12 +96,13 @@ public class WritableCIndex extends CIndex implements IWritableIndex {
}
}
@Override
public synchronized void acquireReadLock() throws InterruptedException {
assert !fIsWriteLocked: "Read locks are not allowed while write-locked."; //$NON-NLS-1$
super.acquireReadLock();
}
@Override
public synchronized void releaseReadLock() {
assert !fIsWriteLocked: "Read locks are not allowed while write-locked."; //$NON-NLS-1$
super.releaseReadLock();
@@ -132,4 +133,11 @@ public class WritableCIndex extends CIndex implements IWritableIndex {
assert !fIsWriteLocked;
fWritableFragment.flush();
}
/* (non-Javadoc)
* @see org.eclipse.cdt.internal.core.index.IWritableIndex#getDatabaseSizeBytes()
*/
public long getDatabaseSizeBytes() {
return fWritableFragment.getDatabaseSizeBytes();
}
}

View file

@@ -108,7 +108,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
fIsFastIndexer= fastIndexer;
fFilesToUpdate= filesToUpdate;
fFilesToRemove.addAll(Arrays.asList(filesToRemove));
updateInfo(0, 0, fFilesToUpdate.length + fFilesToRemove.size());
updateRequestedFiles(fFilesToUpdate.length + fFilesToRemove.size());
}
public final void setIndexHeadersWithoutContext(boolean val) {
@@ -306,7 +306,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
}
}
}
updateInfo(0, 0, count-fFilesToUpdate.length);
updateRequestedFiles(count-fFilesToUpdate.length);
fFilesToUpdate= null;
}
@@ -399,7 +399,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
IIndexFragmentFile ifile = ifiles[i];
fIndex.clearFile(ifile, null);
}
updateInfo(0, 0, -1);
updateRequestedFiles(-1);
}
for (Iterator<IIndexFragmentFile> iterator = ifilesToRemove.iterator(); iterator.hasNext();) {
if (monitor.isCanceled()) {
@@ -407,7 +407,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
}
IIndexFragmentFile ifile= iterator.next();
fIndex.clearFile(ifile, null);
updateInfo(0, 0, -1);
updateRequestedFiles(-1);
}
} finally {
fIndex.releaseWriteLock(1);
@@ -450,7 +450,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
if (ast != null) {
writeToIndex(linkageID, ast, computeHashCode(scanInfo), monitor);
updateInfo(0, 1, 0);
updateFileCount(0, 0, 1);
}
}
}
@@ -477,7 +477,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
final IScannerInfo scannerInfo= fResolver.getBuildConfiguration(linkageID, tu);
parseFile(tu, linkageID, ifl, scannerInfo, monitor);
if (info.fIsUpdated) {
updateInfo(1, 0, 0); // a source file was parsed
updateFileCount(1, 0, 0); // a source file was parsed
}
}
}
@@ -502,7 +502,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
info.fRequestIsCounted= false;
parseFile(tu, linkageID, fResolver.resolveFile(tu), scannerInfo, monitor);
if (info.fIsUpdated) {
updateInfo(0, 1, 0); // a header was parsed in context
updateFileCount(0, 0, 1); // a header was parsed in context
iter.remove();
}
}
@@ -527,7 +527,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
final IScannerInfo scannerInfo= fResolver.getBuildConfiguration(linkageID, header);
parseFile(header, linkageID, ifl, scannerInfo, monitor);
if (info.fIsUpdated) {
updateInfo(0, 1, -1); // a header was parsed without context
updateFileCount(0, 1, 1); // a header was parsed without context
iter.remove();
}
}
@@ -666,7 +666,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
}
final boolean needUpdate= !info.fIsUpdated && info.fRequestUpdate;
if (needUpdate && info.fRequestIsCounted) {
updateInfo(0, 0, -1);
updateFileCount(0, 1, 0); // total headers will be counted when written to db
info.fRequestIsCounted= false;
}
return needUpdate;

View file

@@ -1,5 +1,5 @@
/*******************************************************************************
* Copyright (c) 2007 Wind River Systems, Inc. and others.
* Copyright (c) 2007, 2008 Wind River Systems, Inc. and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -13,26 +13,23 @@ package org.eclipse.cdt.internal.core.pdom;
public class IndexerProgress {
public int fTotalSourcesEstimate;
public int fCompletedHeaders;
public int fRequestedFilesCount;
public int fCompletedSources;
public int fTimeEstimate;
public int fPrimaryHeaderCount; // headers parsed that were actually requested
public int fCompletedHeaders; // all headers including those found through inclusions
public int fTimeEstimate; // fall-back for the time where no file-count is available
public IndexerProgress() {
}
public IndexerProgress(IndexerProgress info) {
fTotalSourcesEstimate= info.fTotalSourcesEstimate;
fCompletedHeaders= info.fCompletedHeaders;
fRequestedFilesCount= info.fRequestedFilesCount;
fCompletedSources= info.fCompletedSources;
fCompletedHeaders= info.fCompletedHeaders;
fPrimaryHeaderCount= info.fPrimaryHeaderCount;
}
public int getRemainingSources() {
return fTotalSourcesEstimate-fCompletedSources;
}
public int getTimeEstimate() {
return fTotalSourcesEstimate > 0 ? fTotalSourcesEstimate : fTimeEstimate;
public int getEstimatedTicks() {
return fRequestedFilesCount > 0 ? fRequestedFilesCount : fTimeEstimate;
}
}

View file

@@ -149,8 +149,7 @@ public class PDOMManager implements IWritableIndexManager, IListener {
private PDOMIndexerJob fIndexerJob;
private IPDOMIndexerTask fCurrentTask;
private LinkedList<IPDOMIndexerTask> fTaskQueue = new LinkedList<IPDOMIndexerTask>();
private int fCompletedSources;
private int fCompletedHeaders;
private int fSourceCount, fHeaderCount, fTickCount;
/**
Stores mapping from pdom to project, used to serialize creation of new pdoms.
@@ -605,8 +604,7 @@ public class PDOMManager implements IWritableIndexManager, IListener {
fTaskQueue.addLast(subjob);
}
if (fIndexerJob == null) {
fCompletedSources= 0;
fCompletedHeaders= 0;
fSourceCount= fHeaderCount= fTickCount= 0;
fIndexerJob = new PDOMIndexerJob(this);
fIndexerJob.setRule(INDEXER_SCHEDULING_RULE);
fIndexerJob.schedule();
@@ -637,8 +635,10 @@ public class PDOMManager implements IWritableIndexManager, IListener {
else {
if (fCurrentTask != null) {
IndexerProgress info= fCurrentTask.getProgressInformation();
fCompletedSources+= info.fCompletedSources;
fCompletedHeaders+= info.fCompletedHeaders;
fSourceCount+= info.fCompletedSources;
fHeaderCount+= info.fCompletedHeaders;
// for the ticks we don't consider additional headers
fTickCount+= info.fCompletedSources + info.fPrimaryHeaderCount;
}
result= fCurrentTask= fTaskQueue.removeFirst();
}
@@ -1033,43 +1033,44 @@ public class PDOMManager implements IWritableIndexManager, IListener {
int getMonitorMessage(IProgressMonitor monitor, int currentTicks, int base) {
assert !Thread.holdsLock(fTaskQueueMutex);
int remainingSources= 0;
int completedSources= 0;
int completedHeaders= 0;
int totalEstimate= 0;
int sourceCount, sourceEstimate, headerCount, tickCount, tickEstimate;
String detail= null;
IndexerProgress info;
synchronized (fTaskQueueMutex) {
completedHeaders= fCompletedHeaders;
completedSources= fCompletedSources;
totalEstimate= fCompletedHeaders+fCompletedSources;
for (Iterator<IPDOMIndexerTask> iter = fTaskQueue.iterator(); iter.hasNext();) {
IPDOMIndexerTask task = iter.next();
info= task.getProgressInformation();
remainingSources+= info.getRemainingSources();
totalEstimate+= info.getTimeEstimate();
// add historic data
sourceCount= sourceEstimate= fSourceCount;
headerCount= fHeaderCount;
tickCount= tickEstimate= fTickCount;
// add future data
for (IPDOMIndexerTask task : fTaskQueue) {
final IndexerProgress info= task.getProgressInformation();
sourceEstimate+= info.fRequestedFilesCount;
tickEstimate+= info.getEstimatedTicks();
}
// add current data
if (fCurrentTask != null) {
info= fCurrentTask.getProgressInformation();
remainingSources+= info.getRemainingSources();
completedHeaders+= info.fCompletedHeaders;
completedSources+= info.fCompletedSources;
final IndexerProgress info= fCurrentTask.getProgressInformation();
sourceCount+= info.fCompletedSources;
sourceEstimate+= info.fRequestedFilesCount-info.fPrimaryHeaderCount;
headerCount+= info.fCompletedHeaders;
// for the ticks we don't consider additional headers
tickCount+= info.fCompletedSources + info.fPrimaryHeaderCount;
tickEstimate+= info.getEstimatedTicks();
detail= PDOMIndexerJob.sMonitorDetail;
totalEstimate+= info.getTimeEstimate();
}
}
int totalSources = remainingSources+completedSources;
String msg= MessageFormat.format(Messages.PDOMManager_indexMonitorDetail, new Object[] {
new Integer(completedSources), new Integer(totalSources),
new Integer(completedHeaders)});
new Integer(sourceCount), new Integer(sourceEstimate),
new Integer(headerCount)});
if (detail != null) {
msg= msg+ ": " + detail; //$NON-NLS-1$
}
monitor.subTask(msg);
if (completedSources > 0 && totalEstimate >= completedSources) {
int newTick= completedSources*base/totalEstimate;
if (tickCount > 0 && tickCount <= tickEstimate) {
int newTick= tickCount*base/tickEstimate;
if (newTick > currentTicks) {
monitor.worked(newTick-currentTicks);
return newTick;

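As a side note on the progress arithmetic above: ticks are reported only once tickCount is positive and no larger than tickEstimate, and the monitor is advanced by the difference against what was already reported. A minimal standalone sketch of that arithmetic, with invented numbers (not taken from this commit):

public class TickMath {
    // Mirrors the guard and scaling used in getMonitorMessage (sketch only).
    static int reportTicks(int tickCount, int tickEstimate, int base, int currentTicks) {
        if (tickCount > 0 && tickCount <= tickEstimate) {
            int newTick = tickCount * base / tickEstimate; // completed fraction scaled to 'base'
            if (newTick > currentTicks) {
                // a real caller would invoke monitor.worked(newTick - currentTicks) here
                return newTick;
            }
        }
        return currentTicks;
    }

    public static void main(String[] args) {
        // 30 of an estimated 120 ticks done, 1000 monitor units in total, 200 already reported:
        System.out.println(reportTicks(30, 120, 1000, 200)); // prints 250
    }
}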
View file

@@ -205,7 +205,7 @@ abstract public class PDOMWriter {
break;
}
if (i<ifls.length-1) {
updateInfo(0, 1, 0); // update header count
updateFileCount(0, 0, 1); // update header count
}
}
}
@@ -461,13 +461,21 @@ abstract public class PDOMWriter {
/**
* Updates current progress information with the provided delta.
* @since 4.0
*/
protected final void updateInfo(int completedSources, int completedHeaders, int totalEstimate) {
protected final void updateFileCount(int sources, int primaryHeader, int header) {
synchronized(fInfo) {
fInfo.fCompletedHeaders+= completedHeaders;
fInfo.fCompletedSources+= completedSources;
fInfo.fTotalSourcesEstimate+= totalEstimate;
fInfo.fCompletedSources+= sources;
fInfo.fPrimaryHeaderCount+= primaryHeader;
fInfo.fCompletedHeaders+= header;
}
}
/**
* Updates current progress information with the provided delta.
*/
protected final void updateRequestedFiles(int delta) {
synchronized(fInfo) {
fInfo.fRequestedFilesCount+= delta;
}
}

View file

@@ -55,6 +55,7 @@ public class WritablePDOM extends PDOM implements IWritableIndexFragment {
fPathResolver= resolver;
}
@Override
public IIndexFragmentFile addFile(int linkageID, IIndexFileLocation location) throws CoreException {
return super.addFile(linkageID, location);
}
@@ -81,10 +82,12 @@ public class WritablePDOM extends PDOM implements IWritableIndexFragment {
((PDOMFile) file).clear(contextsRemoved);
}
@Override
public void clear() throws CoreException {
super.clear();
}
@Override
public void flush() throws CoreException {
super.flush();
}
@@ -175,6 +178,7 @@ public class WritablePDOM extends PDOM implements IWritableIndexFragment {
fCreatedFromScratch = createdFromScratch;
}
@Override
protected final boolean isPermanentlyReadOnly() {
return false;
}
@@ -188,5 +192,10 @@ public class WritablePDOM extends PDOM implements IWritableIndexFragment {
return null;
}
/* (non-Javadoc)
* @see org.eclipse.cdt.internal.core.index.IWritableIndexFragment#getDatabaseSizeBytes()
*/
public long getDatabaseSizeBytes() {
return getDB().getSizeBytes();
}
}

View file

@@ -1,5 +1,5 @@
/*******************************************************************************
* Copyright (c) 2005, 2007 QNX Software Systems and others.
* Copyright (c) 2005, 2008 QNX Software Systems and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -488,7 +488,7 @@ public class Database {
public void giveUpExclusiveLock(final boolean flush) throws CoreException {
if (fExclusiveLock) {
try {
ArrayList dirtyChunks= new ArrayList();
ArrayList<Chunk> dirtyChunks= new ArrayList<Chunk>();
synchronized (fCache) {
for (int i= 1; i < fChunks.length; i++) {
Chunk chunk= fChunks[i];
@@ -542,7 +542,7 @@ public class Database {
}
// be careful as other readers may access chunks concurrently
ArrayList dirtyChunks= new ArrayList();
ArrayList<Chunk> dirtyChunks= new ArrayList<Chunk>();
synchronized (fCache) {
for (int i= 1; i < fChunks.length ; i++) {
Chunk chunk= fChunks[i];
@@ -556,7 +556,7 @@ public class Database {
flushAndUnlockChunks(dirtyChunks, true);
}
private void flushAndUnlockChunks(final ArrayList dirtyChunks, boolean isComplete) throws CoreException {
private void flushAndUnlockChunks(final ArrayList<Chunk> dirtyChunks, boolean isComplete) throws CoreException {
assert !Thread.holdsLock(fCache);
synchronized(fHeaderChunk) {
if (!fHeaderChunk.fDirty) {
@@ -566,8 +566,8 @@ public class Database {
}
if (!dirtyChunks.isEmpty()) {
markFileIncomplete();
for (Iterator it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = (Chunk) it.next();
for (Iterator<Chunk> it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = it.next();
if (chunk.fDirty) {
chunk.flush();
}
@@ -575,8 +575,8 @@ public class Database {
// only after the chunks are flushed we may unlock and release them.
synchronized (fCache) {
for (Iterator it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = (Chunk) it.next();
for (Iterator<Chunk> it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = it.next();
chunk.fLocked= false;
if (chunk.fCacheIndex < 0) {
fChunks[chunk.fSequenceNumber]= null;
@@ -618,4 +618,12 @@ public class Database {
public long getCacheMisses() {
return cacheMisses;
}
public long getSizeBytes() {
try {
return fFile.length();
} catch (IOException e) {
}
return 0;
}
}

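For reference, the new getSizeBytes() simply reports the length of the file backing the database, falling back to 0 if the length cannot be read. A standalone sketch of that idea, assuming the database file is accessed through a RandomAccessFile (as the IOException handling above suggests); the file name is invented for the example:

import java.io.IOException;
import java.io.RandomAccessFile;

public class SizeProbe {
    public static void main(String[] args) {
        long size = 0;
        try (RandomAccessFile file = new RandomAccessFile("example.pdom", "r")) {
            size = file.length(); // on-disk size in bytes, which is what the indexer log reports
        } catch (IOException e) {
            // keep the fall-back value of 0, as Database.getSizeBytes() does
        }
        System.out.println("database size: " + size + " bytes");
    }
}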
View file

@@ -1,5 +1,5 @@
/*******************************************************************************
* Copyright (c) 2006, 2007 Wind River Systems, Inc. and others.
* Copyright (c) 2006, 2008 Wind River Systems, Inc. and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -18,6 +18,7 @@ public class Messages extends NLS {
public static String PDOMImportTask_errorInvalidArchive;
public static String PDOMImportTask_errorInvalidPDOMVersion;
public static String PDOMIndexerTask_collectingFilesTask;
public static String PDOMIndexerTask_indexerInfo;
public static String TodoTaskUpdater_DeleteJob;
public static String TodoTaskUpdater_taskFormat;
public static String TodoTaskUpdater_UpdateJob;

View file

@@ -13,6 +13,7 @@
package org.eclipse.cdt.internal.core.pdom.indexer;
import java.text.NumberFormat;
import java.util.Calendar;
import org.eclipse.cdt.core.CCorePlugin;
import org.eclipse.cdt.core.dom.ILinkage;
@@ -37,8 +38,11 @@ import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.content.IContentType;
import org.eclipse.osgi.util.NLS;
/**
* Configures the abstract indexer task suitable for indexing projects.
@@ -47,6 +51,7 @@ public abstract class PDOMIndexerTask extends AbstractIndexerTask implements IPD
private static final String TRUE = "true"; //$NON-NLS-1$
private AbstractPDOMIndexer fIndexer;
private boolean fWriteInfoToLog;
protected PDOMIndexerTask(ITranslationUnit[] addFiles, ITranslationUnit[] updateFiles, ITranslationUnit[] removeFiles,
AbstractPDOMIndexer indexer, boolean isFastIndexer) {
@@ -92,7 +97,7 @@ public abstract class PDOMIndexerTask extends AbstractIndexerTask implements IPD
public final void run(IProgressMonitor monitor) throws InterruptedException {
long start = System.currentTimeMillis();
runTask(monitor);
traceEnd(start, fIndex);
traceEnd(start, fIndex, monitor.isCanceled());
}
/**
@@ -181,57 +186,140 @@ public abstract class PDOMIndexerTask extends AbstractIndexerTask implements IPD
return new TodoTaskUpdater();
}
protected void traceEnd(long start, IWritableIndex index) {
protected void traceEnd(long start, IWritableIndex index, boolean wasCancelled) {
// log entry
if (fWriteInfoToLog && !wasCancelled && index != null) {
final long totalTime = System.currentTimeMillis() - start;
final IndexerProgress info= getProgressInformation();
final int sum= fStatistics.fDeclarationCount+fStatistics.fReferenceCount+fStatistics.fProblemBindingCount;
final double problemPct= sum==0 ? 0.0 : (double) fStatistics.fProblemBindingCount / (double) sum;
NumberFormat nfGroup= NumberFormat.getNumberInstance();
nfGroup.setGroupingUsed(true);
NumberFormat nfPercent= NumberFormat.getPercentInstance();
nfPercent.setMaximumFractionDigits(2);
nfPercent.setMinimumFractionDigits(2);
NumberFormat nfTime= NumberFormat.getNumberInstance();
nfTime.setMaximumFractionDigits(2);
nfTime.setMinimumFractionDigits(2);
nfTime.setGroupingUsed(true);
final String msg= NLS.bind(Messages.PDOMIndexerTask_indexerInfo,
new Object[] {
getCProject().getElementName(),
nfGroup.format(info.fCompletedSources),
nfGroup.format(info.fCompletedHeaders),
nfTime.format((double) totalTime/1000),
nfGroup.format(fStatistics.fDeclarationCount),
nfGroup.format(fStatistics.fReferenceCount),
nfGroup.format(fStatistics.fUnresolvedIncludesCount),
nfGroup.format(fStatistics.fPreprocessorProblemCount + fStatistics.fSyntaxProblemsCount),
nfGroup.format(fStatistics.fProblemBindingCount),
nfPercent.format(problemPct)
}
);
CCorePlugin.getDefault().getLog().log(new Status(IStatus.INFO, CCorePlugin.PLUGIN_ID, msg));
}
// tracing
if (checkDebugOption(IPDOMIndexerTask.TRACE_STATISTICS, TRUE)) {
IndexerProgress info= getProgressInformation();
String ident= " "; //$NON-NLS-1$
final long totalTime = System.currentTimeMillis() - start;
final IndexerProgress info= getProgressInformation();
final int sum= fStatistics.fDeclarationCount+fStatistics.fReferenceCount+fStatistics.fProblemBindingCount;
final double problemPct= sum==0 ? 0.0 : (double) fStatistics.fProblemBindingCount / (double) sum;
String kind= getIndexer().getClass().getName();
kind= kind.substring(kind.lastIndexOf('.')+1);
String name= " "; //$NON-NLS-1$
final long dbSize= index.getDatabaseSizeBytes();
System.out.println("C/C++ Indexer: Project '" + getProject().getElementName() //$NON-NLS-1$
+ "' (" + info.fCompletedSources + " sources, " //$NON-NLS-1$ //$NON-NLS-2$
+ "' (" + info.fCompletedSources + " sources, " //$NON-NLS-1$//$NON-NLS-2$
+ info.fCompletedHeaders + " headers)"); //$NON-NLS-1$
boolean allFiles= getIndexAllFiles();
boolean skipRefs= checkProperty(IndexerPreferences.KEY_SKIP_ALL_REFERENCES);
boolean skipTypeRefs= skipRefs || checkProperty(IndexerPreferences.KEY_SKIP_TYPE_REFERENCES);
System.out.println(name + " Options: " //$NON-NLS-1$
System.out.println(ident + " Options: " //$NON-NLS-1$
+ "indexer='" + kind //$NON-NLS-1$
+ "', parseAllFiles=" + allFiles //$NON-NLS-1$
+ ", skipReferences=" + skipRefs //$NON-NLS-1$
+ ", skipTypeReferences=" + skipTypeRefs //$NON-NLS-1$
+ "."); //$NON-NLS-1$
System.out.println(name + " Timings: " //$NON-NLS-1$
+ (System.currentTimeMillis() - start) + " total, " //$NON-NLS-1$
System.out.println(ident + " Database: " + dbSize + " bytes"); //$NON-NLS-1$ //$NON-NLS-2$
System.out.println(ident + " Timings: " //$NON-NLS-1$
+ totalTime + " total, " //$NON-NLS-1$
+ fStatistics.fParsingTime + " parser, " //$NON-NLS-1$
+ fStatistics.fResolutionTime + " resolution, " //$NON-NLS-1$
+ fStatistics.fAddToIndexTime + " index update."); //$NON-NLS-1$
System.out.println(name + " Errors: " //$NON-NLS-1$
System.out.println(ident + " Errors: " //$NON-NLS-1$
+ fStatistics.fErrorCount + " internal, " //$NON-NLS-1$
+ fStatistics.fUnresolvedIncludesCount + " include, " //$NON-NLS-1$
+ fStatistics.fPreprocessorProblemCount + " scanner, " //$NON-NLS-1$
+ fStatistics.fSyntaxProblemsCount + " syntax errors."); //$NON-NLS-1$
int sum= fStatistics.fDeclarationCount+fStatistics.fReferenceCount+fStatistics.fProblemBindingCount;
double problemPct= sum==0 ? 0.0 : (double) fStatistics.fProblemBindingCount / (double) sum;
NumberFormat nf= NumberFormat.getPercentInstance();
nf.setMaximumFractionDigits(2);
nf.setMinimumFractionDigits(2);
System.out.println(name + " Names: " //$NON-NLS-1$
NumberFormat nfPercent= NumberFormat.getPercentInstance();
nfPercent.setMaximumFractionDigits(2);
nfPercent.setMinimumFractionDigits(2);
System.out.println(ident + " Names: " //$NON-NLS-1$
+ fStatistics.fDeclarationCount + " declarations, " //$NON-NLS-1$
+ fStatistics.fReferenceCount + " references, " //$NON-NLS-1$
+ fStatistics.fProblemBindingCount + "(" + nf.format(problemPct) + ") unresolved."); //$NON-NLS-1$ //$NON-NLS-2$
+ fStatistics.fProblemBindingCount + "(" + nfPercent.format(problemPct) + ") unresolved."); //$NON-NLS-1$ //$NON-NLS-2$
if (index != null) {
long misses= index.getCacheMisses();
long hits= index.getCacheHits();
long tries= misses+hits;
double missPct= tries==0 ? 0.0 : (double) misses / (double) tries;
nf.setMinimumFractionDigits(4);
nf.setMaximumFractionDigits(4);
System.out.println(name + " Cache[" //$NON-NLS-1$
System.out.println(ident + " Cache[" //$NON-NLS-1$
+ ChunkCache.getSharedInstance().getMaxSize() / 1024 / 1024 + "mb]: " + //$NON-NLS-1$
+ hits + " hits, " //$NON-NLS-1$
+ misses + "(" + nf.format(missPct)+ ") misses."); //$NON-NLS-1$//$NON-NLS-2$
+ misses + "(" + nfPercent.format(missPct)+ ") misses."); //$NON-NLS-1$ //$NON-NLS-2$
if ("true".equals(System.getProperty("SHOW_COMPRESSED_INDEXER_INFO"))) { //$NON-NLS-1$ //$NON-NLS-2$
Calendar cal = Calendar.getInstance();
NumberFormat twoDigits= NumberFormat.getNumberInstance();
twoDigits.setMinimumIntegerDigits(2);
NumberFormat nfGroup= NumberFormat.getNumberInstance();
nfGroup.setGroupingUsed(true);
final String sep0 = "|"; //$NON-NLS-1$
final String sep = "| "; //$NON-NLS-1$
final String sec = "s"; //$NON-NLS-1$
final String mb = "mb"; //$NON-NLS-1$
final String million = "M"; //$NON-NLS-1$
System.out.print(sep0);
System.out.print(cal.get(Calendar.YEAR) + twoDigits.format(cal.get(Calendar.MONTH)+1) + twoDigits.format(cal.get(Calendar.DAY_OF_MONTH)+1));
System.out.print(sep);
System.out.print(nfGroup.format(info.fCompletedSources));
System.out.print(sep);
System.out.print(nfGroup.format(info.fCompletedHeaders));
System.out.print(sep);
System.out.print(nfGroup.format((totalTime+500)/1000) + sec);
System.out.print(sep);
System.out.print(nfGroup.format((fStatistics.fParsingTime+500)/1000) + sec);
System.out.print(sep);
System.out.print(nfGroup.format((fStatistics.fResolutionTime+500)/1000) + sec);
System.out.print(sep);
System.out.print(nfGroup.format((fStatistics.fAddToIndexTime+500)/1000) + sec);
System.out.print(sep);
System.out.print(nfGroup.format((dbSize+1024*512)/1024/1024) + mb);
System.out.print(sep);
System.out.print(nfGroup.format((tries+1000*500)/1000000) + million);
System.out.print(sep);
System.out.print(nfGroup.format(fStatistics.fDeclarationCount));
System.out.print(sep);
System.out.print(nfGroup.format(fStatistics.fReferenceCount));
System.out.print(sep);
System.out.print(nfGroup.format(fStatistics.fProblemBindingCount));
System.out.print(sep);
System.out.print(nfPercent.format(problemPct));
System.out.print(sep);
System.out.print(nfGroup.format(fStatistics.fErrorCount));
System.out.print(sep);
System.out.print(nfGroup.format(fStatistics.fUnresolvedIncludesCount));
System.out.print(sep);
System.out.print(nfGroup.format(fStatistics.fPreprocessorProblemCount));
System.out.print(sep);
System.out.print(nfGroup.format(fStatistics.fSyntaxProblemsCount));
System.out.println(sep0);
}
}
}
}
@@ -239,4 +327,8 @@ public abstract class PDOMIndexerTask extends AbstractIndexerTask implements IPD
protected ICProject getCProject() {
return fIndexer.project;
}
public void setWriteInfoToLog() {
fWriteInfoToLog= true;
}
}

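Two formatting idioms in the compressed info line above are worth spelling out: adding half of the divisor before an integer division rounds to the nearest unit (seconds, megabytes, millions), and the percent formatter is configured for exactly two fraction digits. A standalone sketch with invented sample values:

import java.text.NumberFormat;

public class TraceFormat {
    public static void main(String[] args) {
        long totalTime = 83400;                      // milliseconds
        long dbSize = 7 * 1024 * 1024 + 600 * 1024;  // bytes
        System.out.println((totalTime + 500) / 1000 + "s");              // "83s", rounded to the nearest second
        System.out.println((dbSize + 1024 * 512) / 1024 / 1024 + "mb");  // "8mb", rounded to the nearest megabyte

        NumberFormat nfPercent = NumberFormat.getPercentInstance();
        nfPercent.setMaximumFractionDigits(2);
        nfPercent.setMinimumFractionDigits(2);
        System.out.println(nfPercent.format(37.0 / 1500.0));             // e.g. "2.47%"
    }
}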
View file

@@ -1,5 +1,5 @@
/*******************************************************************************
* Copyright (c) 2007 Wind River Systems, Inc. and others.
* Copyright (c) 2007, 2008 Wind River Systems, Inc. and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -103,20 +103,20 @@ public class PDOMRebuildTask implements IPDOMIndexerTask {
private synchronized void createDelegate(ICProject project, IProgressMonitor monitor) throws CoreException {
boolean allFiles= TRUE.equals(fIndexer.getProperty(IndexerPreferences.KEY_INDEX_ALL_FILES));
List sources= new ArrayList();
List headers= allFiles ? sources : null;
List<ITranslationUnit> sources= new ArrayList<ITranslationUnit>();
List<ITranslationUnit> headers= allFiles ? sources : null;
TranslationUnitCollector collector= new TranslationUnitCollector(sources, headers, monitor);
project.accept(collector);
ITranslationUnit[] tus= (ITranslationUnit[]) sources.toArray(new ITranslationUnit[sources.size()]);
ITranslationUnit[] tus= sources.toArray(new ITranslationUnit[sources.size()]);
fDelegate= fIndexer.createTask(tus, NO_TUS, NO_TUS);
if (fDelegate instanceof PDOMIndexerTask) {
final PDOMIndexerTask delegate = (PDOMIndexerTask) fDelegate;
delegate.setUpdateFlags(IIndexManager.UPDATE_ALL);
delegate.setParseUpFront();
delegate.setWriteInfoToLog();
}
}
public synchronized IndexerProgress getProgressInformation() {
return fDelegate != null ? fDelegate.getProgressInformation() : fProgress;
}

View file

@@ -1,5 +1,5 @@
###############################################################################
# Copyright (c) 2006, 2007 Wind River Systems, Inc. and others.
# Copyright (c) 2006, 2008 Wind River Systems, Inc. and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
@@ -9,6 +9,7 @@
# Markus Schorn (Wind River Systems)
###############################################################################
PDOMIndexerTask_collectingFilesTask=Collecting files (project ''{0}'')
PDOMIndexerTask_indexerInfo=Indexed ''{0}'' ({1} sources, {2} headers) in {3} sec: {4} declarations; {5} references; {6} unresolved inclusions; {7} syntax errors; {8} unresolved names ({9})
PDOMImportTask_errorInvalidPDOMVersion=The version of the cdt-index to import for project {0} does not match
PDOMImportTask_errorInvalidArchive=Invalid Archive: {0}

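To illustrate the new PDOMIndexerTask_indexerInfo entry, the sketch below fills the placeholders with java.text.MessageFormat purely for illustration (the plug-in itself binds them with NLS.bind); every value is invented for the example, while in the plug-in they come from IndexerProgress and the parser statistics:

import java.text.MessageFormat;

public class IndexerInfoDemo {
    public static void main(String[] args) {
        String pattern = "Indexed ''{0}'' ({1} sources, {2} headers) in {3} sec: "
                + "{4} declarations; {5} references; {6} unresolved inclusions; "
                + "{7} syntax errors; {8} unresolved names ({9})";
        String msg = MessageFormat.format(pattern, new Object[] {
                "HelloWorld", "120", "348", "83.40", "25,391", "61,204", "3", "0", "57", "0.07%" });
        System.out.println(msg);
        // Indexed 'HelloWorld' (120 sources, 348 headers) in 83.40 sec: 25,391 declarations; ...
    }
}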
View file

@ -64,6 +64,21 @@ public class BaseUITestCase extends BaseTestCase {
super(name);
}
/* (non-Javadoc)
* @see org.eclipse.cdt.core.testplugin.util.BaseTestCase#setUp()
*/
protected void setUp() throws Exception {
super.setUp();
}
/* (non-Javadoc)
* @see org.eclipse.cdt.core.testplugin.util.BaseTestCase#tearDown()
*/
protected void tearDown() throws Exception {
runEventQueue(0);
super.tearDown();
}
/**
* Reads a section in comments form the source of the given class. Fully
* equivalent to <code>readTaggedComment(getClass(), tag)</code>
@@ -305,9 +320,9 @@ public class BaseUITestCase extends BaseTestCase {
final protected TreeItem checkTreeNode(Tree tree, int i0, int i1, String label) {
TreeItem item= null;
TreeItem root= tree.getItem(i0);
String itemText= null;
for (int i=0; i<400; i++) {
TreeItem root= tree.getItem(i0);
try {
TreeItem firstItem= root.getItem(0);
final String text= firstItem.getText();
@@ -331,7 +346,6 @@ public class BaseUITestCase extends BaseTestCase {
}
catch (SWTException e) {
// widget was disposed, try again.
root= tree.getItem(i0);
}
runEventQueue(10);
}

View file

@@ -163,7 +163,4 @@ public class TypeHierarchyAcrossProjectsTest extends TypeHierarchyBaseTest {
assertEquals(0, item4.getItemCount());
checkMethodTable(new String[] {"field4", "method4()"});
}
public void testDummy() {
}
}