();
+ private StreamHasher hasher;
+ private long hash64;
protected LazyCharArray() {
+ hasher = new StreamHasher();
}
@Override
@@ -66,8 +70,18 @@ public abstract class LazyCharArray extends AbstractCharArray {
return true;
}
+ @Override
+ public long getContentsHash() {
+ if (hasher != null) {
+ readUpTo(Integer.MAX_VALUE);
+ hash64 = hasher.computeHash();
+ hasher = null;
+ }
+ return hash64;
+ }
+
private void readUpTo(int offset) {
- if (fLength >=0)
+ if (fLength >= 0)
return;
final int chunkOffset= offset >> CHUNK_BITS;
@@ -78,13 +92,13 @@ public abstract class LazyCharArray extends AbstractCharArray {
public final char get(int offset) {
int chunkOffset= offset >> CHUNK_BITS;
char[] data= getChunkData(chunkOffset);
- return data[offset & (CHUNK_SIZE-1)];
+ return data[offset & (CHUNK_SIZE - 1)];
}
@Override
public final void arraycopy(int offset, char[] destination, int destinationPos, int length) {
int chunkOffset= offset >> CHUNK_BITS;
- int loffset= offset & (CHUNK_SIZE-1);
+ int loffset= offset & (CHUNK_SIZE - 1);
char[] data= getChunkData(chunkOffset);
final int canCopy = data.length-loffset;
if (length <= canCopy) {
@@ -124,7 +138,7 @@ public abstract class LazyCharArray extends AbstractCharArray {
*/
protected Chunk createChunk(int chunkOffset) {
final int chunkCount = fChunks.size();
- long fileOffset= chunkCount == 0 ? 0 : fChunks.get(chunkCount-1).fFileEndOffset;
+ long fileOffset= chunkCount == 0 ? 0 : fChunks.get(chunkCount - 1).fFileEndOffset;
try {
for (int i = chunkCount; i <= chunkOffset; i++) {
long[] fileEndOffset= {0};
@@ -133,12 +147,15 @@ public abstract class LazyCharArray extends AbstractCharArray {
if (charCount == 0) {
fLength= fChunks.size() * CHUNK_SIZE;
break;
- }
+ }
+ if (hasher != null) {
+ hasher.addChunk(data);
+ }
// New chunk
Chunk chunk= new Chunk(fileOffset, fileEndOffset[0], data);
fChunks.add(chunk);
if (charCount < CHUNK_SIZE) {
- fLength= (fChunks.size()-1) * CHUNK_SIZE + charCount;
+ fLength= (fChunks.size() - 1) * CHUNK_SIZE + charCount;
break;
}
fileOffset= fileEndOffset[0];
@@ -162,8 +179,8 @@ public abstract class LazyCharArray extends AbstractCharArray {
}
/**
- * Read the chunk data at the given source offset and provide the end-offset in the
- * source.
+ * Read the chunk data at the given source offset and provide the end-offset in
+ * the source.
*/
protected abstract char[] readChunkData(long sourceOffset, long[] sourceEndOffsetHolder) throws Exception;
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/StreamHasher.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/StreamHasher.java
new file mode 100644
index 00000000000..5cbf46fae79
--- /dev/null
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/StreamHasher.java
@@ -0,0 +1,236 @@
+/*******************************************************************************
+ * Copyright (c) 2010 Google, Inc and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Sergey Prigogin (Google) - initial API and implementation
+ *
+ * Based on lookup3.c, by Bob Jenkins {@link "http://burtleburtle.net/bob/c/lookup3.c"}
+ *
+ * Here is the original comment by Bob Jenkins:
+ * -------------------------------------------------------------------------------
+ * lookup3.c, by Bob Jenkins, May 2006, Public Domain.
+ *
+ * These are functions for producing 32-bit hashes for hash table lookup.
+ * hashword(), hashlittle(), hashlittle2(), hashbig(), mix(), and final()
+ * are externally useful functions. Routines to test the hash are included
+ * if SELF_TEST is defined. You can use this free for any purpose. It's in
+ * the public domain. It has no warranty.
+ *
+ * You probably want to use hashlittle(). hashlittle() and hashbig()
+ * hash byte arrays. hashlittle() is faster than hashbig() on
+ * little-endian machines. Intel and AMD are little-endian machines.
+ * On second thought, you probably want hashlittle2(), which is identical to
+ * hashlittle() except it returns two 32-bit hashes for the price of one.
+ * You could implement hashbig2() if you wanted but I haven't bothered here.
+ *
+ * If you want to find a hash of, say, exactly 7 integers, do
+ * a = i1; b = i2; c = i3;
+ * mix(a, b, c);
+ * a += i4; b += i5; c += i6;
+ * mix(a, b, c);
+ * a += i7;
+ * finalMix(a, b, c);
+ * then use c as the hash value. If you have a variable length array of
+ * 4-byte integers to hash, use hashword(). If you have a byte array (like
+ * a character string), use hashlittle(). If you have several byte arrays, or
+ * a mix of things, see the comments above hashlittle().
+ *
+ * Why is this so big? I read 12 bytes at a time into 3 4-byte integers,
+ * then mix those integers. This is fast (you can do a lot more thorough
+ * mixing with 12*3 instructions on 3 integers than you can with 3 instructions
+ * on 1 byte), but shoehorning those bytes into integers efficiently is messy.
+ *******************************************************************************/
+
+package org.eclipse.cdt.internal.core.parser.scanner;
+
+/**
+ * Computes a 64-bit hash value of a character stream that can be supplied one chunk at a time.
+ * Usage:
+ *
+ * StreamHasher hasher = new StreamHasher();
+ * for (long offset = 0; offset < streamLength; offset += chunkLength) {
+ *     hasher.addChunk(chunkOfCharacters);
+ * }
+ * long hashValue = hasher.computeHash();
+ *
+ *
+ * Based on lookup3.c by Bob Jenkins from {@link "http://burtleburtle.net/bob/c/lookup3.c"}
+ */
+public final class StreamHasher {
+ private static final long SEED = 3141592653589793238L; // PI
+ private static final long EMPTY_STRING_HASH = new StreamHasher().computeHashInternal();
+
+ long hashedOffset; // Current position in the stream of characters.
+ int state; // Current position in the stream of characters modulo 6, or -1 after computeHash is called.
+ int a;
+ int b;
+ int c;
+ char previousCharacter;
+
+ public StreamHasher() {
+ // Set up the internal state.
+ hashedOffset = 0;
+ state = 0;
+ a = b = c = (int) SEED;
+ c += SEED >>> 32;
+ }
+
+ /**
+ * Adds a chunk of data to the hasher.
+ * @param chunk Contents of the chunk.
+ */
+ public void addChunk(char[] chunk) {
+ for (int pos = 0; pos < chunk.length; pos++, hashedOffset++) {
+ char cc = chunk[pos];
+ switch (state++) {
+ case -1:
+ throw new IllegalStateException("addChunk is called after computeHash."); //$NON-NLS-1$
+ case 0:
+ case 2:
+ case 4:
+ previousCharacter = cc;
+ break;
+ case 1:
+ a += previousCharacter | (cc << 16);
+ break;
+ case 3:
+ b += previousCharacter | (cc << 16);
+ break;
+ case 5:
+ c += previousCharacter | (cc << 16);
+ mix();
+ state = 0;
+ break;
+ }
+ }
+ }
+
+ /**
+ * Computes and returns the hash value. Must be called once after the last chunk.
+ * @return The hash value of the character stream.
+ */
+ public long computeHash() {
+ if (state < 0) {
+ throw new IllegalStateException("computeHash method is called more than once."); //$NON-NLS-1$
+ }
+ return computeHashInternal() ^ EMPTY_STRING_HASH;
+ }
+
+ private long computeHashInternal() {
+ switch (state) {
+ case 1:
+ a += previousCharacter;
+ break;
+ case 3:
+ b += previousCharacter;
+ break;
+ case 5:
+ c += previousCharacter;
+ break;
+ }
+ state = -1; // Protect against subsequent calls.
+ finalMix();
+ return (c & 0xFFFFFFFFL) | ((long) b << 32);
+ }
+
+ /**
+ * Computes a 64-bit hash value of a String. The resulting hash value
+ * is zero if the string is empty.
+ *
+ * @param str The string to hash.
+ * @return The hash value.
+ */
+ public static long hash(String str) {
+ StreamHasher hasher = new StreamHasher();
+ hasher.addChunk(str.toCharArray());
+ return hasher.computeHash();
+ }
+
+ /**
+ * Mixes three 32-bit values reversibly.
+ *
+ * This is reversible, so any information in a, b, c before mix() is
+ * still in a, b, c after mix().
+ *
+ * If four pairs of a, b, c inputs are run through mix(), or through
+ * mix() in reverse, there are at least 32 bits of the output that
+ * are sometimes the same for one pair and different for another pair.
+ * This was tested for:
+ * * pairs that differed by one bit, by two bits, in any combination
+ * of top bits of a, b, c, or in any combination of bottom bits of
+ * a, b, c.
+ * * "differ" is defined as +, -, ^, or ~^. For + and -, I transformed
+ * the output delta to a Gray code (a ^ (a >> 1)) so a string of 1's
+ * (as is commonly produced by subtraction) look like a single 1-bit
+ * difference.
+ * * the base values were pseudo-random, all zero but one bit set, or
+ * all zero plus a counter that starts at zero.
+ *
+ * Some k values for my "a -= c; a ^= Integer.rotateLeft(c, k); c += b;"
+ * arrangement that satisfy this are
+ * 4 6 8 16 19 4
+ * 9 15 3 18 27 15
+ * 14 9 3 7 17 3
+ * Well, "9 15 3 18 27 15" didn't quite get 32 bits diffing
+ * for "differ" defined as + with a one-bit base and a two-bit delta.
+ * I used http://burtleburtle.net/bob/hash/avalanche.html to choose
+ * the operations, constants, and arrangements of the variables.
+ *
+ * This does not achieve avalanche. There are input bits of a, b, c
+ * that fail to affect some output bits of a, b, c, especially of a.
+ * The most thoroughly mixed value is c, but it doesn't really even
+ * achieve avalanche in c.
+ *
+ * This allows some parallelism. Read-after-writes are good at doubling
+ * the number of bits affected, so the goal of mixing pulls in the opposite
+ * direction as the goal of parallelism. I did what I could. Rotates
+ * seem to cost as much as shifts on every machine I could lay my hands
+ * on, and rotates are much kinder to the top and bottom bits, so I used
+ * rotates.
+ */
+ private void mix() {
+ a -= c; a ^= Integer.rotateLeft(c, 4); c += b;
+ b -= a; b ^= Integer.rotateLeft(a, 6); a += c;
+ c -= b; c ^= Integer.rotateLeft(b, 8); b += a;
+ a -= c; a ^= Integer.rotateLeft(c, 16); c += b;
+ b -= a; b ^= Integer.rotateLeft(a, 19); a += c;
+ c -= b; c ^= Integer.rotateLeft(b, 4); b += a;
+ }
+
+ /**
+ * Final mixing of 3 32-bit values a, b, c into c
+ *
+ * Pairs of a, b, c values differing in only a few bits will usually
+ * produce values of c that look totally different. This was tested for
+ * * pairs that differed by one bit, by two bits, in any combination
+ * of top bits of a, b, c, or in any combination of bottom bits of
+ * a, b, c.
+ * * "differ" is defined as +, -, ^, or ~^. For + and -, I transformed
+ * the output delta to a Gray code (a ^ (a >> 1)) so a string of 1's (as
+ * is commonly produced by subtraction) look like a single 1-bit
+ * difference.
+ * * the base values were pseudo-random, all zero but one bit set, or
+ * all zero plus a counter that starts at zero.
+ *
+ * These constants passed:
+ * 14 11 25 16 4 14 24
+ * 12 14 25 16 4 14 24
+ * and these came close:
+ * 4 8 15 26 3 22 24
+ * 10 8 15 26 3 22 24
+ * 11 8 15 26 3 22 24
+ */
+ private void finalMix() {
+ c ^= b; c -= Integer.rotateLeft(b, 14);
+ a ^= c; a -= Integer.rotateLeft(c, 11);
+ b ^= a; b -= Integer.rotateLeft(a, 25);
+ c ^= b; c -= Integer.rotateLeft(b, 16);
+ a ^= c; a -= Integer.rotateLeft(c, 4);
+ b ^= a; b -= Integer.rotateLeft(a, 14);
+ c ^= b; c -= Integer.rotateLeft(b, 24);
+ }
+}
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/AbstractIndexerTask.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/AbstractIndexerTask.java
index e68d36f4ee9..28ed9e397b8 100644
--- a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/AbstractIndexerTask.java
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/AbstractIndexerTask.java
@@ -50,6 +50,7 @@ import org.eclipse.cdt.internal.core.index.IIndexFragment;
import org.eclipse.cdt.internal.core.index.IIndexFragmentFile;
import org.eclipse.cdt.internal.core.index.IWritableIndex;
import org.eclipse.cdt.internal.core.index.IndexBasedFileContentProvider;
+import org.eclipse.cdt.internal.core.parser.scanner.StreamHasher;
import org.eclipse.cdt.internal.core.parser.scanner.InternalFileContentProvider;
import org.eclipse.cdt.internal.core.pdom.dom.PDOMNotImplementedError;
import org.eclipse.core.runtime.Assert;
@@ -245,9 +246,8 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
}
- private final IASTTranslationUnit createAST(Object tu, AbstractLanguage language, IScannerInfo scanInfo, int options,
- boolean inContext, IProgressMonitor pm) throws CoreException {
- final FileContent codeReader= fResolver.getCodeReader(tu);
+ private final IASTTranslationUnit createAST(Object tu, AbstractLanguage language, FileContent codeReader,
+ IScannerInfo scanInfo, int options, boolean inContext, IProgressMonitor pm) throws CoreException {
if (codeReader == null) {
return null;
}
@@ -368,6 +368,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
IProgressMonitor monitor) throws CoreException {
final boolean forceAll= (fUpdateFlags & IIndexManager.UPDATE_ALL) != 0;
final boolean checkTimestamps= (fUpdateFlags & IIndexManager.UPDATE_CHECK_TIMESTAMPS) != 0;
+ final boolean checkFileContentsHash = (fUpdateFlags & IIndexManager.UPDATE_CHECK_CONTENTS_HASH) != 0;
final boolean checkConfig= (fUpdateFlags & IIndexManager.UPDATE_CHECK_CONFIGURATION) != 0;
int count= 0;
@@ -401,7 +402,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
if (checkConfig) {
update= isSourceUnit ? isSourceUnitConfigChange(tu, ifile) : isHeaderConfigChange(tu, ifile);
}
- update= update || force || (checkTimestamps && fResolver.getLastModified(ifl) != ifile.getTimestamp());
+ update= update || force || isModified(checkTimestamps, checkFileContentsHash, ifl, tu, ifile);
if (update) {
requestUpdate(linkageID, ifl, ifile);
store(tu, linkageID, isSourceUnit, files);
@@ -423,7 +424,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
if (checkConfig) {
update= isHeaderConfigChange(tu, ifile);
}
- update= update || force || (checkTimestamps && fResolver.getLastModified(ifl) != ifile.getTimestamp());
+ update= update || force || isModified(checkTimestamps, checkFileContentsHash, ifl, tu, ifile);
if (update) {
final int linkageID = ifile.getLinkageID();
requestUpdate(linkageID, ifl, ifile);
@@ -437,7 +438,18 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
updateRequestedFiles(count - fFilesToUpdate.length);
fFilesToUpdate= null;
}
-
+
+ private boolean isModified(boolean checkTimestamps, boolean checkFileContentsHash, IIndexFileLocation ifl,
+ Object tu, IIndexFragmentFile file) throws CoreException {
+ boolean timestampDifferent = checkTimestamps && fResolver.getLastModified(ifl) != file.getTimestamp();
+ if (timestampDifferent) {
+ if (checkFileContentsHash && computeFileContentsHash(tu) == file.getContentsHash()) {
+ return false;
+ }
+ }
+ return timestampDifferent;
+ }
+
private void requestUpdate(int linkageID, IIndexFileLocation ifl, IIndexFragmentFile ifile) {
FileKey key= new FileKey(linkageID, ifl.getURI());
IndexFileContent info= fFileInfos.get(key);
@@ -589,7 +601,8 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
}
}
}
- writeToIndex(linkageID, ast, computeHashCode(scanInfo), monitor);
+ writeToIndex(linkageID, ast, StreamHasher.hash(code), computeHashCode(scanInfo),
+ monitor);
updateFileCount(0, 0, 1);
}
}
@@ -734,10 +747,11 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
pm.subTask(getMessage(MessageKind.parsingFileTask,
path.lastSegment(), path.removeLastSegments(1).toString()));
long start= System.currentTimeMillis();
- IASTTranslationUnit ast= createAST(tu, lang, scanInfo, fASTOptions, inContext, pm);
+ FileContent codeReader= fResolver.getCodeReader(tu);
+ IASTTranslationUnit ast= createAST(tu, lang, codeReader, scanInfo, fASTOptions, inContext, pm);
fStatistics.fParsingTime += System.currentTimeMillis() - start;
if (ast != null) {
- writeToIndex(linkageID, ast, computeHashCode(scanInfo), pm);
+ writeToIndex(linkageID, ast, codeReader.getContentsHash(), computeHashCode(scanInfo), pm);
}
} catch (CoreException e) {
th= e;
@@ -755,8 +769,8 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
}
}
- private void writeToIndex(final int linkageID, IASTTranslationUnit ast, int configHash,
- IProgressMonitor pm) throws CoreException, InterruptedException {
+ private void writeToIndex(final int linkageID, IASTTranslationUnit ast, long fileContentsHash,
+ int configHash, IProgressMonitor pm) throws CoreException, InterruptedException {
HashSet enteredFiles= new HashSet();
ArrayList orderedIFLs= new ArrayList();
@@ -775,7 +789,7 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
IIndexFileLocation[] ifls= orderedIFLs.toArray(new IIndexFileLocation[orderedIFLs.size()]);
try {
- addSymbols(ast, ifls, fIndex, 1, false, configHash, fTodoTaskUpdater, pm);
+ addSymbols(ast, ifls, fIndex, 1, false, fileContentsHash, configHash, fTodoTaskUpdater, pm);
} finally {
// mark as updated in any case, to avoid parsing files that caused an exception to be thrown.
for (IIndexFileLocation ifl : ifls) {
@@ -940,6 +954,11 @@ public abstract class AbstractIndexerTask extends PDOMWriter {
return result * 31 + key.hashCode();
}
+ private long computeFileContentsHash(Object tu) {
+ FileContent codeReader= fResolver.getCodeReader(tu);
+ return codeReader != null ? codeReader.getContentsHash() : 0;
+ }
+
public final IndexFileContent getFileContent(int linkageID, IIndexFileLocation ifl) throws CoreException {
if (!needToUpdateHeader(linkageID, ifl)) {
IndexFileContent info= getFileInfo(linkageID, ifl);
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/IndexUpdatePolicy.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/IndexUpdatePolicy.java
index 48fe8b519e9..b06291087ed 100644
--- a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/IndexUpdatePolicy.java
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/IndexUpdatePolicy.java
@@ -140,7 +140,8 @@ public class IndexUpdatePolicy {
}
else if (fIndexer != null) {
if (oldPolicy == MANUAL) {
- task= new PDOMUpdateTask(fIndexer, IIndexManager.UPDATE_CHECK_TIMESTAMPS);
+ task= new PDOMUpdateTask(fIndexer,
+ IIndexManager.UPDATE_CHECK_TIMESTAMPS | IIndexManager.UPDATE_CHECK_CONTENTS_HASH);
clearTUs();
}
else if (fKind == POST_CHANGE) {
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOM.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOM.java
index b687fe1a6d4..8c6049ef55e 100644
--- a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOM.java
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOM.java
@@ -191,10 +191,11 @@ public class PDOM extends PlatformObject implements IPDOM {
* 94.0 - new model for storing types, bug 294306.
* 95.0 - parameter packs, bug 294730.
* 96.0 - storing pack expansions in the template parameter map, bug 294730.
+ * 97.0 - storing file contents hash in PDOMFile, bug 302083.
*/
- private static final int MIN_SUPPORTED_VERSION= version(96, 0);
- private static final int MAX_SUPPORTED_VERSION= version(96, Short.MAX_VALUE);
- private static final int DEFAULT_VERSION = version(96, 0);
+ private static final int MIN_SUPPORTED_VERSION= version(97, 0);
+ private static final int MAX_SUPPORTED_VERSION= version(97, Short.MAX_VALUE);
+ private static final int DEFAULT_VERSION = version(97, 0);
private static int version(int major, int minor) {
return (major << 16) + minor;
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMManager.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMManager.java
index 79eb0631809..ee843c56018 100644
--- a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMManager.java
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMManager.java
@@ -571,7 +571,8 @@ public class PDOMManager implements IWritableIndexManager, IListener {
pdom.releaseReadLock();
}
if (resume) {
- enqueue(new PDOMUpdateTask(indexer, IIndexManager.UPDATE_CHECK_TIMESTAMPS));
+ enqueue(new PDOMUpdateTask(indexer,
+ IIndexManager.UPDATE_CHECK_TIMESTAMPS | IIndexManager.UPDATE_CHECK_CONTENTS_HASH));
}
}
return;
@@ -592,7 +593,8 @@ public class PDOMManager implements IWritableIndexManager, IListener {
IPDOMIndexerTask task= null;
if (operation.wasSuccessful()) {
- task= new PDOMUpdateTask(indexer, IIndexManager.UPDATE_CHECK_TIMESTAMPS);
+ task= new PDOMUpdateTask(indexer,
+ IIndexManager.UPDATE_CHECK_TIMESTAMPS | IIndexManager.UPDATE_CHECK_CONTENTS_HASH);
}
else {
task= new PDOMRebuildTask(indexer);
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMWriter.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMWriter.java
index f81cab32664..16c9686539a 100644
--- a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMWriter.java
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/PDOMWriter.java
@@ -138,15 +138,15 @@ abstract public class PDOMWriter {
}
/**
- * Extracts symbols from the given ast and adds them to the index.
+ * Extracts symbols from the given AST and adds them to the index.
*
* When flushIndex is set to false
, you must make sure to flush the
* index after your last write operation.
* @since 4.0
*/
public void addSymbols(IASTTranslationUnit ast, IIndexFileLocation[] ifls, IWritableIndex index,
- int readlockCount, boolean flushIndex, int configHash, ITodoTaskUpdater taskUpdater,
- IProgressMonitor pm) throws InterruptedException, CoreException {
+ int readlockCount, boolean flushIndex, long fileContentsHash, int configHash,
+ ITodoTaskUpdater taskUpdater, IProgressMonitor pm) throws InterruptedException, CoreException {
if (fShowProblems) {
fShowInclusionProblems= true;
fShowScannerProblems= true;
@@ -165,8 +165,8 @@ abstract public class PDOMWriter {
resolveNames(symbolMap, ifls, stati, pm);
// index update
- storeSymbolsInIndex(symbolMap, ifls, ast.getLinkage().getLinkageID(), configHash, contextIncludes,
- index, readlockCount, flushIndex, stati, pm);
+ storeSymbolsInIndex(symbolMap, ifls, ast.getLinkage().getLinkageID(), fileContentsHash,
+ configHash, contextIncludes, index, readlockCount, flushIndex, stati, pm);
if (taskUpdater != null) {
taskUpdater.updateTasks(ast.getComments(), ifls);
@@ -193,9 +193,10 @@ abstract public class PDOMWriter {
}
private void storeSymbolsInIndex(final Map symbolMap, IIndexFileLocation[] ifls,
- int linkageID, int configHash, HashSet contextIncludes,
- IWritableIndex index, int readlockCount, boolean flushIndex,
- ArrayList stati, IProgressMonitor pm) throws InterruptedException, CoreException {
+ int linkageID, long fileContentsHash, int configHash,
+ HashSet contextIncludes, IWritableIndex index, int readlockCount,
+ boolean flushIndex, ArrayList stati, IProgressMonitor pm)
+ throws InterruptedException, CoreException {
for (int i= 0; i < ifls.length; i++) {
if (pm.isCanceled())
return;
@@ -209,7 +210,8 @@ abstract public class PDOMWriter {
YieldableIndexLock lock = new YieldableIndexLock(index, readlockCount, flushIndex);
lock.acquire();
try {
- storeFileInIndex(index, ifl, symbolMap, linkageID, configHash, contextIncludes, lock);
+ storeFileInIndex(index, ifl, symbolMap, linkageID, fileContentsHash, configHash,
+ contextIncludes, lock);
} catch (RuntimeException e) {
th= e;
} catch (PDOMNotImplementedError e) {
@@ -457,9 +459,9 @@ abstract public class PDOMWriter {
}
private IIndexFragmentFile storeFileInIndex(IWritableIndex index, IIndexFileLocation location,
- Map symbolMap, int linkageID, int configHash,
- Set contextIncludes, YieldableIndexLock lock)
- throws CoreException, InterruptedException {
+ Map symbolMap, int linkageID, long fileContentsHash,
+ int configHash, Set contextIncludes,
+ YieldableIndexLock lock) throws CoreException, InterruptedException {
Set clearedContexts= Collections.emptySet();
IIndexFragmentFile file;
long timestamp = fResolver.getLastModified(location);
@@ -518,6 +520,7 @@ abstract public class PDOMWriter {
}
if (SEMI_TRANSACTIONAL_UPDATES) {
file.setTimestamp(timestamp);
+ file.setContentsHash(fileContentsHash);
file = index.commitUncommittedFile();
}
} finally {
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/dom/PDOMFile.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/dom/PDOMFile.java
index ad1fce180b3..d8cdc2cd00b 100644
--- a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/dom/PDOMFile.java
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/dom/PDOMFile.java
@@ -71,11 +71,12 @@ public class PDOMFile implements IIndexFragmentFile {
private static final int LOCATION_REPRESENTATION = 16;
private static final int LINKAGE_ID= 20;
private static final int TIME_STAMP = 24;
- private static final int SCANNER_CONFIG_HASH= 32;
- private static final int LAST_USING_DIRECTIVE= 36;
- private static final int FIRST_MACRO_REFERENCE= 40;
+ private static final int CONTENT_HASH= 32;
+ private static final int SCANNER_CONFIG_HASH= 40;
+ private static final int LAST_USING_DIRECTIVE= 44;
+ private static final int FIRST_MACRO_REFERENCE= 48;
- private static final int RECORD_SIZE= 44;
+ private static final int RECORD_SIZE= 52;
public static class Comparator implements IBTreeComparator {
private Database db;
@@ -223,6 +224,7 @@ public class PDOMFile implements IIndexFragmentFile {
}
setTimestamp(sourceFile.getTimestamp());
+ setContentsHash(sourceFile.getContentsHash());
setScannerConfigurationHashcode(sourceFile.getScannerConfigurationHashcode());
sourceFile.delete();
@@ -271,6 +273,16 @@ public class PDOMFile implements IIndexFragmentFile {
db.putLong(record + TIME_STAMP, timestamp);
}
+ public long getContentsHash() throws CoreException {
+ Database db = fLinkage.getDB();
+ return db.getLong(record + CONTENT_HASH);
+ }
+
+ public void setContentsHash(long hash) throws CoreException {
+ Database db= fLinkage.getDB();
+ db.putLong(record + CONTENT_HASH, hash);
+ }
+
public int getScannerConfigurationHashcode() throws CoreException {
Database db = fLinkage.getDB();
return db.getInt(record + SCANNER_CONFIG_HASH);
diff --git a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/indexer/PDOMIndexerTask.java b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/indexer/PDOMIndexerTask.java
index c7841f07931..e1cdf3e7770 100644
--- a/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/indexer/PDOMIndexerTask.java
+++ b/core/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/pdom/indexer/PDOMIndexerTask.java
@@ -99,7 +99,7 @@ public abstract class PDOMIndexerTask extends AbstractIndexerTask implements IPD
setIndexFilesWithoutBuildConfiguration(false);
setIndexHeadersWithoutContext(UnusedHeaderStrategy.skip);
}
- setUpdateFlags(IIndexManager.UPDATE_CHECK_TIMESTAMPS);
+ setUpdateFlags(IIndexManager.UPDATE_CHECK_TIMESTAMPS | IIndexManager.UPDATE_CHECK_CONTENTS_HASH);
setForceFirstFiles(forceFiles.length);
}
diff --git a/core/org.eclipse.cdt.ui/src/org/eclipse/cdt/internal/ui/actions/UpdateIndexWithModifiedFilesAction.java b/core/org.eclipse.cdt.ui/src/org/eclipse/cdt/internal/ui/actions/UpdateIndexWithModifiedFilesAction.java
index a48312b325c..0a75159d855 100644
--- a/core/org.eclipse.cdt.ui/src/org/eclipse/cdt/internal/ui/actions/UpdateIndexWithModifiedFilesAction.java
+++ b/core/org.eclipse.cdt.ui/src/org/eclipse/cdt/internal/ui/actions/UpdateIndexWithModifiedFilesAction.java
@@ -16,6 +16,7 @@ public class UpdateIndexWithModifiedFilesAction extends AbstractUpdateIndexActio
@Override
protected int getUpdateOptions() {
- return IIndexManager.UPDATE_CHECK_TIMESTAMPS | IIndexManager.UPDATE_CHECK_CONFIGURATION | IIndexManager.UPDATE_EXTERNAL_FILES_FOR_PROJECT;
+ return IIndexManager.UPDATE_CHECK_TIMESTAMPS | IIndexManager.UPDATE_CHECK_CONFIGURATION |
+ IIndexManager.UPDATE_EXTERNAL_FILES_FOR_PROJECT | IIndexManager.UPDATE_CHECK_CONTENTS_HASH;
}
}