Mirror of https://github.com/eclipse-cdt/cdt, synced 2025-04-29 19:45:01 +02:00
Merge remote-tracking branch 'cdt/master' into sd90
Commit af227ede75
10 changed files with 150 additions and 170 deletions
@@ -1842,25 +1842,25 @@ public class InputType extends BuildObject implements IInputType {
 	}

 	/**
-	 * Check if legacy scanner discovery method should be used.
+	 * Check if legacy scanner discovery profiles should be used.
 	 */
-	private boolean isLegacyScannerDiscovery() {
-		boolean isLanguageSettingsProvidersEnabled = false;
+	private boolean useLegacyScannerDiscoveryProfiles() {
+		boolean useLegacy = true;
 		ITool tool = getParent();
-		if (tool!=null) {
-			IBuildObject bo = tool.getParent();
-			if (bo instanceof IToolChain) {
-				IConfiguration cfg = ((IToolChain) bo).getParent();
-				if (cfg!=null) {
+		if (tool != null) {
+			IBuildObject toolchain = tool.getParent();
+			if (toolchain instanceof IToolChain && ((IToolChain) toolchain).getDefaultLanguageSettingsProviderIds() != null) {
+				IConfiguration cfg = ((IToolChain) toolchain).getParent();
+				if (cfg != null && cfg.getDefaultLanguageSettingsProviderIds() != null) {
 					IResource rc = cfg.getOwner();
-					if (rc!=null) {
+					if (rc != null) {
 						IProject project = rc.getProject();
-						isLanguageSettingsProvidersEnabled = ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
+						useLegacy = !ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
 					}
 				}
 			}
 		}
-		return !isLanguageSettingsProvidersEnabled;
+		return useLegacy;
 	}

 	/**
@@ -1879,11 +1879,12 @@ public class InputType extends BuildObject implements IInputType {
 	}

 	public String getDiscoveryProfileIdAttribute() {
-		if (isLegacyScannerDiscovery()) {
-			return getLegacyDiscoveryProfileIdAttribute();
+		String discoveryProfileAttribute = getDiscoveryProfileIdAttributeInternal();
+		if (discoveryProfileAttribute == null && useLegacyScannerDiscoveryProfiles()) {
+			discoveryProfileAttribute = getLegacyDiscoveryProfileIdAttribute();
 		}

-		return getDiscoveryProfileIdAttributeInternal();
+		return discoveryProfileAttribute;
 	}

 	/**
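The InputType hunks above rename isLegacyScannerDiscovery() to useLegacyScannerDiscoveryProfiles() and make getDiscoveryProfileIdAttribute() prefer the explicitly stored attribute, falling back to the legacy profile id only when nothing is set and language settings providers are disabled; the same shape reappears in the ToolChain hunks below. A minimal, self-contained sketch of that fallback pattern follows (the class and field names are illustrative stand-ins, not CDT API):

// Sketch of the "prefer explicit value, fall back to legacy" pattern from the diff above.
// LegacyAwareSetting is hypothetical; the real classes resolve these values through MBS APIs.
public class LegacyAwareSetting {
	private final String internalProfileId;  // value of the new attribute, may be null
	private final String legacyProfileId;    // value computed the pre-provider way
	private final boolean providersEnabled;  // the ScannerDiscoveryLegacySupport flag in CDT

	public LegacyAwareSetting(String internalProfileId, String legacyProfileId, boolean providersEnabled) {
		this.internalProfileId = internalProfileId;
		this.legacyProfileId = legacyProfileId;
		this.providersEnabled = providersEnabled;
	}

	/** Explicit value wins; legacy is consulted only when nothing is set and providers are off. */
	public String getDiscoveryProfileId() {
		String id = internalProfileId;
		if (id == null && !providersEnabled) {
			id = legacyProfileId;
		}
		return id;
	}
}

The reordering means an explicit setting always wins; the legacy profile becomes a default of last resort rather than an early-return branch.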
@@ -89,7 +89,7 @@ public class ToolChain extends HoldsOptions implements IToolChain, IMatchKeyProv
 	private String secondaryOutputIds;
 	private Boolean isAbstract;
 	private String defaultLanguageSettingsProviderIds;
-	private String scannerConfigDiscoveryProfileId;
+	private String scannerConfigDiscoveryProfileId;
 	private String versionsSupported;
 	private String convertToId;
 	private IConfigurationElement managedIsToolChainSupportedElement = null;
@@ -1545,19 +1545,21 @@ public class ToolChain extends HoldsOptions implements IToolChain, IMatchKeyProv
 	}

 	/**
-	 * Check if legacy scanner discovery method should be used.
+	 * Check if legacy scanner discovery profiles should be used.
 	 */
-	private boolean isLegacyScannerDiscovery() {
-		boolean isLanguageSettingsProvidersEnabled = false;
-		IConfiguration cfg = getParent();
-		if (cfg != null) {
-			IResource rc = cfg.getOwner();
-			if (rc != null) {
-				IProject project = rc.getProject();
-				isLanguageSettingsProvidersEnabled = ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
+	private boolean useLegacyScannerDiscoveryProfiles() {
+		boolean useLegacy = true;
+		if (getDefaultLanguageSettingsProviderIds() != null) {
+			IConfiguration cfg = getParent();
+			if (cfg != null && cfg.getDefaultLanguageSettingsProviderIds() != null) {
+				IResource rc = cfg.getOwner();
+				if (rc != null) {
+					IProject project = rc.getProject();
+					useLegacy = !ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
+				}
 			}
 		}
-		return !isLanguageSettingsProvidersEnabled;
+		return useLegacy;
 	}

 	/**
@@ -1582,11 +1584,12 @@ public class ToolChain extends HoldsOptions implements IToolChain, IMatchKeyProv

 	@Override
 	public String getScannerConfigDiscoveryProfileId() {
-		if (isLegacyScannerDiscovery()) {
-			return getLegacyScannerConfigDiscoveryProfileId();
+		String discoveryProfileId = getScannerConfigDiscoveryProfileIdInternal();
+		if (discoveryProfileId == null && useLegacyScannerDiscoveryProfiles()) {
+			discoveryProfileId = getLegacyScannerConfigDiscoveryProfileId();
 		}

-		return getScannerConfigDiscoveryProfileIdInternal();
+		return discoveryProfileId;
 	}

 	/**
@@ -533,10 +533,7 @@ public class ConfigurationDataProvider extends CConfigurationDataProvider implem
 				}
 				providers.add(provider);
 			}
-		}
-
-		// AG TODO - should it be when empty or when ids==null?
-		if (providers.isEmpty()) {
+		} else {
 			providers = ScannerDiscoveryLegacySupport.getDefaultProvidersLegacy();
 		}

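The ConfigurationDataProvider hunk above drops the "fill the list, then replace it if it ended up empty" logic (and the AG TODO questioning it) in favour of an explicit else branch taken when the tool chain declares no default provider ids. A rough sketch of the resulting control flow, with hypothetical helpers standing in for the LanguageSettingsManager and ScannerDiscoveryLegacySupport calls of the real code:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Sketch only: pickProviders mirrors the if/else structure after the change.
class DefaultProviderSelection {
	static List<String> pickProviders(String[] ids) {
		List<String> providers = new ArrayList<>();
		if (ids != null) {
			for (String id : ids) {
				providers.add(lookupProvider(id));
			}
		} else {
			// No default ids declared: use the legacy defaults instead of an isEmpty() check.
			providers = legacyDefaults();
		}
		return providers;
	}

	private static String lookupProvider(String id) {
		return "provider:" + id; // placeholder resolution
	}

	private static List<String> legacyDefaults() {
		return new ArrayList<>(Arrays.asList("legacy-provider"));
	}
}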
@@ -1923,10 +1923,11 @@

 			<toolChain
 					archList="all"
-					osList="macosx"
+					id="cdt.managedbuild.toolchain.gnu.macosx.base"
+					languageSettingsProviders="org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser;org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector"
 					name="%ToolChainName.Macosx"
-					targetTool="cdt.managedbuild.tool.macosx.c.linker.macosx.base;cdt.managedbuild.tool.macosx.cpp.linker.macosx.base;cdt.managedbuild.tool.gnu.archiver"
-					id="cdt.managedbuild.toolchain.gnu.macosx.base">
+					osList="macosx"
+					targetTool="cdt.managedbuild.tool.macosx.c.linker.macosx.base;cdt.managedbuild.tool.macosx.cpp.linker.macosx.base;cdt.managedbuild.tool.gnu.archiver">
 				<targetPlatform
 					id="cdt.managedbuild.target.gnu.platform.macosx.base"
 					name="%PlatformName.Dbg"
@@ -2053,9 +2054,10 @@

 			<toolChain
 					archList="all"
-					osList="solaris"
 					id="cdt.managedbuild.toolchain.gnu.solaris.base"
+					languageSettingsProviders="org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser;org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector"
 					name="%ToolChainName.Solaris"
+					osList="solaris"
 					superClass="cdt.managedbuild.toolchain.gnu.base">
 				<targetPlatform
 					id="cdt.managedbuild.target.gnu.solaris.base"
@@ -3256,9 +3258,9 @@
 		</projectType>

 		<configuration
-				id="cdt.managedbuild.config.gnu.macosx.base"
-				cleanCommand="rm -rf"
-				>
+				cleanCommand="rm -rf"
+				id="cdt.managedbuild.config.gnu.macosx.base"
+				languageSettingsProviders="org.eclipse.cdt.ui.UserLanguageSettingsProvider;org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider;${Toolchain};-org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser">
 			<enablement type="CONTAINER_ATTRIBUTE"
 				attribute="artifactExtension"
 				value="dylib"
@@ -3669,8 +3671,9 @@
 		</projectType>

 		<configuration
-				id="cdt.managedbuild.config.gnu.solaris.base"
-				cleanCommand="rm -rf">
+				cleanCommand="rm -rf"
+				id="cdt.managedbuild.config.gnu.solaris.base"
+				languageSettingsProviders="org.eclipse.cdt.ui.UserLanguageSettingsProvider;org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider;${Toolchain};-org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser">
 			<enablement type="CONTAINER_ATTRIBUTE"
 				attribute="artifactExtension"
 				value="so"
@@ -6,7 +6,7 @@
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
-* Markus Schorn - initial API and implementation
+* Markus Schorn - initial API and implementation
 *******************************************************************************/
 package org.eclipse.cdt.internal.core.pdom;

@@ -23,5 +23,4 @@ public interface IPDOM extends IIndexFragment {
 	void addListener(PDOM.IListener listener);

 	void removeListener(PDOM.IListener indexView);
-
 }
@@ -6,9 +6,9 @@
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
-* QNX - Initial API and implementation
-* Markus Schorn (Wind River Systems)
-* IBM Corporation
+* QNX - Initial API and implementation
+* Markus Schorn (Wind River Systems)
+* IBM Corporation
 *******************************************************************************/
 package org.eclipse.cdt.internal.core.pdom.db;

@@ -26,9 +26,9 @@ final class Chunk {
 	final Database fDatabase;
 	final int fSequenceNumber;

-	boolean fCacheHitFlag= false;
-	boolean fDirty= false;
-	boolean fLocked= false; // locked chunks must not be released from cache.
+	boolean fCacheHitFlag;
+	boolean fDirty;
+	boolean fLocked; // locked chunks must not be released from cache.
 	int fCacheIndex= -1;

 	Chunk(Database db, int sequenceNumber) {
@@ -39,7 +39,7 @@ final class Chunk {
 	void read() throws CoreException {
 		try {
 			final ByteBuffer buf= ByteBuffer.wrap(fBuffer);
-			fDatabase.read(buf, (long)fSequenceNumber*Database.CHUNK_SIZE);
+			fDatabase.read(buf, (long) fSequenceNumber*Database.CHUNK_SIZE);
 		} catch (IOException e) {
 			throw new CoreException(new DBStatus(e));
 		}
@@ -48,53 +48,53 @@ final class Chunk {
 	void flush() throws CoreException {
 		try {
 			final ByteBuffer buf= ByteBuffer.wrap(fBuffer);
-			fDatabase.write(buf, (long)fSequenceNumber*Database.CHUNK_SIZE);
+			fDatabase.write(buf, (long) fSequenceNumber*Database.CHUNK_SIZE);
 		} catch (IOException e) {
 			throw new CoreException(new DBStatus(e));
 		}
 		fDirty= false;
 	}
-	private static int recPtrToIndex( final long offset ) {
-		return (int)(offset & Database.OFFSET_IN_CHUNK_MASK );
+
+	private static int recPtrToIndex(final long offset) {
+		return (int) (offset & Database.OFFSET_IN_CHUNK_MASK);
 	}

 	public void putByte(final long offset, final byte value) {
 		assert fLocked;
 		fDirty= true;
-		fBuffer[recPtrToIndex( offset )]= value;
+		fBuffer[recPtrToIndex(offset)]= value;
 	}

 	public byte getByte(final long offset) {
-		return fBuffer[recPtrToIndex( offset )];
+		return fBuffer[recPtrToIndex(offset)];
 	}

 	public byte[] getBytes(final long offset, final int length) {
 		final byte[] bytes = new byte[length];
-		System.arraycopy(fBuffer, recPtrToIndex( offset ), bytes, 0, length);
+		System.arraycopy(fBuffer, recPtrToIndex(offset), bytes, 0, length);
 		return bytes;
 	}

 	public void putBytes(final long offset, final byte[] bytes) {
 		assert fLocked;
 		fDirty= true;
-		System.arraycopy(bytes, 0, fBuffer, recPtrToIndex( offset ), bytes.length);
+		System.arraycopy(bytes, 0, fBuffer, recPtrToIndex(offset), bytes.length);
 	}

 	public void putInt(final long offset, final int value) {
 		assert fLocked;
 		fDirty= true;
-		int idx= recPtrToIndex( offset );
+		int idx= recPtrToIndex(offset);
 		putInt(value, fBuffer, idx);
 	}

 	static final void putInt(final int value, final byte[] buffer, int idx) {
-		buffer[idx]= (byte)(value >> 24);
-		buffer[++idx]= (byte)(value >> 16);
-		buffer[++idx]= (byte)(value >> 8);
-		buffer[++idx]= (byte)(value);
+		buffer[idx]= (byte) (value >> 24);
+		buffer[++idx]= (byte) (value >> 16);
+		buffer[++idx]= (byte) (value >> 8);
+		buffer[++idx]= (byte) (value);
 	}

-
 	public int getInt(final long offset) {
 		return getInt(fBuffer, recPtrToIndex(offset));
 	}
@@ -106,7 +106,6 @@ final class Chunk {
 				((buffer[++idx] & 0xff) << 0);
 	}

-
 	/**
 	 * A free Record Pointer is a pointer to a raw block, i.e. the
 	 * pointer is not moved past the BLOCK_HEADER_SIZE.
@@ -119,8 +118,8 @@ final class Chunk {
 	}

 	/**
-	 * A free Record Pointer is a pointer to a raw block, i.e. the
-	 * pointer is not moved past the BLOCK_HEADER_SIZE.
+	 * A free Record Pointer is a pointer to a raw block,
+	 * i.e. the pointer is not moved past the BLOCK_HEADER_SIZE.
 	 */
 	private static long expandToFreeRecPtr(int value) {
 		/*
@@ -134,7 +133,6 @@ final class Chunk {
 		return address << Database.BLOCK_SIZE_DELTA_BITS;
 	}

-
 	/**
 	 * A Record Pointer is a pointer as returned by Database.malloc().
 	 * This is a pointer to a block + BLOCK_HEADER_SIZE.
@@ -165,10 +163,9 @@ final class Chunk {
 		putRecPtr(value, fBuffer, idx);
 	}

-
 	/**
-	 * A free Record Pointer is a pointer to a raw block, i.e. the
-	 * pointer is not moved past the BLOCK_HEADER_SIZE.
+	 * A free Record Pointer is a pointer to a raw block,
+	 * i.e. the pointer is not moved past the BLOCK_HEADER_SIZE.
 	 */
 	public void putFreeRecPtr(final long offset, final long value) {
 		assert fLocked;
@@ -191,14 +188,14 @@ final class Chunk {
 	public void put3ByteUnsignedInt(final long offset, final int value) {
 		assert fLocked;
 		fDirty= true;
-		int idx= recPtrToIndex( offset );
-		fBuffer[idx]= (byte)(value >> 16);
-		fBuffer[++idx]= (byte)(value >> 8);
-		fBuffer[++idx]= (byte)(value);
+		int idx= recPtrToIndex(offset);
+		fBuffer[idx]= (byte) (value >> 16);
+		fBuffer[++idx]= (byte) (value >> 8);
+		fBuffer[++idx]= (byte) (value);
 	}

 	public int get3ByteUnsignedInt(final long offset) {
-		int idx= recPtrToIndex( offset );
+		int idx= recPtrToIndex(offset);
 		return ((fBuffer[idx] & 0xff) << 16) |
 				((fBuffer[++idx] & 0xff) << 8) |
 				((fBuffer[++idx] & 0xff) << 0);
@@ -207,60 +204,60 @@ final class Chunk {
 	public void putShort(final long offset, final short value) {
 		assert fLocked;
 		fDirty= true;
-		int idx= recPtrToIndex( offset );
-		fBuffer[idx]= (byte)(value >> 8);
-		fBuffer[++idx]= (byte)(value);
+		int idx= recPtrToIndex(offset);
+		fBuffer[idx]= (byte) (value >> 8);
+		fBuffer[++idx]= (byte) (value);
 	}

 	public short getShort(final long offset) {
-		int idx= recPtrToIndex( offset );
+		int idx= recPtrToIndex(offset);
 		return (short) (((fBuffer[idx] << 8) | (fBuffer[++idx] & 0xff)));
 	}

 	public long getLong(final long offset) {
-		int idx= recPtrToIndex( offset );
-		return ((((long)fBuffer[idx] & 0xff) << 56) |
-				(((long)fBuffer[++idx] & 0xff) << 48) |
-				(((long)fBuffer[++idx] & 0xff) << 40) |
-				(((long)fBuffer[++idx] & 0xff) << 32) |
-				(((long)fBuffer[++idx] & 0xff) << 24) |
-				(((long)fBuffer[++idx] & 0xff) << 16) |
-				(((long)fBuffer[++idx] & 0xff) << 8) |
-				(((long)fBuffer[++idx] & 0xff) << 0));
+		int idx= recPtrToIndex(offset);
+		return ((((long) fBuffer[idx] & 0xff) << 56) |
+				(((long) fBuffer[++idx] & 0xff) << 48) |
+				(((long) fBuffer[++idx] & 0xff) << 40) |
+				(((long) fBuffer[++idx] & 0xff) << 32) |
+				(((long) fBuffer[++idx] & 0xff) << 24) |
+				(((long) fBuffer[++idx] & 0xff) << 16) |
+				(((long) fBuffer[++idx] & 0xff) << 8) |
+				(((long) fBuffer[++idx] & 0xff) << 0));
 	}

 	public void putLong(final long offset, final long value) {
 		assert fLocked;
 		fDirty= true;
-		int idx= recPtrToIndex( offset );
+		int idx= recPtrToIndex(offset);

-		fBuffer[idx]= (byte)(value >> 56);
-		fBuffer[++idx]= (byte)(value >> 48);
-		fBuffer[++idx]= (byte)(value >> 40);
-		fBuffer[++idx]= (byte)(value >> 32);
-		fBuffer[++idx]= (byte)(value >> 24);
-		fBuffer[++idx]= (byte)(value >> 16);
-		fBuffer[++idx]= (byte)(value >> 8);
-		fBuffer[++idx]= (byte)(value);
+		fBuffer[idx]= (byte) (value >> 56);
+		fBuffer[++idx]= (byte) (value >> 48);
+		fBuffer[++idx]= (byte) (value >> 40);
+		fBuffer[++idx]= (byte) (value >> 32);
+		fBuffer[++idx]= (byte) (value >> 24);
+		fBuffer[++idx]= (byte) (value >> 16);
+		fBuffer[++idx]= (byte) (value >> 8);
+		fBuffer[++idx]= (byte) (value);
 	}

 	public void putChar(final long offset, final char value) {
 		assert fLocked;
 		fDirty= true;
-		int idx= recPtrToIndex( offset );
-		fBuffer[idx]= (byte)(value >> 8);
-		fBuffer[++idx]= (byte)(value);
+		int idx= recPtrToIndex(offset);
+		fBuffer[idx]= (byte) (value >> 8);
+		fBuffer[++idx]= (byte) (value);
 	}

 	public void putChars(final long offset, char[] chars, int start, int len) {
 		assert fLocked;
 		fDirty= true;
 		int idx= recPtrToIndex(offset)-1;
-		final int end= start+len;
+		final int end= start + len;
 		for (int i = start; i < end; i++) {
 			char value= chars[i];
-			fBuffer[++idx]= (byte)(value >> 8);
-			fBuffer[++idx]= (byte)(value);
+			fBuffer[++idx]= (byte) (value >> 8);
+			fBuffer[++idx]= (byte) (value);
 		}
 	}

@@ -268,28 +265,28 @@ final class Chunk {
 		assert fLocked;
 		fDirty= true;
 		int idx= recPtrToIndex(offset)-1;
-		final int end= start+len;
+		final int end= start + len;
 		for (int i = start; i < end; i++) {
 			char value= chars[i];
-			fBuffer[++idx]= (byte)(value);
+			fBuffer[++idx]= (byte) (value);
 		}
 	}

 	public char getChar(final long offset) {
-		int idx= recPtrToIndex( offset );
+		int idx= recPtrToIndex(offset);
 		return (char) (((fBuffer[idx] << 8) | (fBuffer[++idx] & 0xff)));
 	}

 	public void getChars(final long offset, final char[] result, int start, int len) {
 		final ByteBuffer buf= ByteBuffer.wrap(fBuffer);
-		buf.position(recPtrToIndex( offset ));
+		buf.position(recPtrToIndex(offset));
 		buf.asCharBuffer().get(result, start, len);
 	}

 	public void getCharsFromBytes(final long offset, final char[] result, int start, int len) {
 		final int pos = recPtrToIndex(offset);
 		for (int i = 0; i < len; i++) {
-			result[start+i] = (char) (fBuffer[pos+i] & 0xff);
+			result[start + i] = (char) (fBuffer[pos + i] & 0xff);
 		}
 	}

@@ -307,8 +304,8 @@ final class Chunk {
 		assert fLocked;
 		fDirty= true;
 		int idx = recPtrToIndex(offset);
-		int i=0;
-		while (i<len) {
+		int i= 0;
+		while (i < len) {
 			fBuffer[idx++]= data[i++];
 		}
 	}
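Most of the Chunk hunks are mechanical formatting (a space after casts, recPtrToIndex(offset) without inner padding), but they also show how Chunk serializes multi-byte values big-endian into its backing byte array. The following stand-alone sketch reproduces that encoding for int values; it mirrors putInt/getInt from the diff but is not the CDT class itself:

// Big-endian int packing in the style of Chunk.putInt/getInt (sketch, not CDT code).
public final class BigEndianCodec {
	/** Writes value into buffer[idx..idx+3], most significant byte first. */
	static void putInt(final int value, final byte[] buffer, int idx) {
		buffer[idx]= (byte) (value >> 24);
		buffer[++idx]= (byte) (value >> 16);
		buffer[++idx]= (byte) (value >> 8);
		buffer[++idx]= (byte) (value);
	}

	/** Reads the int back; the & 0xff masks undo sign extension of the individual bytes. */
	static int getInt(final byte[] buffer, int idx) {
		return ((buffer[idx] & 0xff) << 24) |
				((buffer[++idx] & 0xff) << 16) |
				((buffer[++idx] & 0xff) << 8) |
				((buffer[++idx] & 0xff) << 0);
	}

	public static void main(String[] args) {
		byte[] buf = new byte[4];
		putInt(0xCAFEBABE, buf, 0);
		// Prints "cafebabe": the round trip preserves the value.
		System.out.println(Integer.toHexString(getInt(buf, 0)));
	}
}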
@@ -6,24 +6,23 @@
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
-* Markus Schorn - initial API and implementation
+* Markus Schorn - initial API and implementation
 *******************************************************************************/
-
 package org.eclipse.cdt.internal.core.pdom.db;

 public final class ChunkCache {
 	private static ChunkCache sSharedInstance= new ChunkCache();

 	private Chunk[] fPageTable;
-	private boolean fTableIsFull= false;
-	private int fPointer= 0;
+	private boolean fTableIsFull;
+	private int fPointer;

 	public static ChunkCache getSharedInstance() {
 		return sSharedInstance;
 	}

 	public ChunkCache() {
-		this(5*1024*1024);
+		this(5 * 1024 * 1024);
 	}

 	public ChunkCache(long maxSize) {
@@ -42,8 +41,7 @@ public final class ChunkCache {
 			evictChunk();
 			chunk.fCacheIndex= fPointer;
 			fPageTable[fPointer]= chunk;
-		}
-		else {
+		} else {
 			chunk.fCacheIndex= fPointer;
 			fPageTable[fPointer]= chunk;

@@ -88,8 +86,7 @@ public final class ChunkCache {
 		if (fTableIsFull) {
 			fPointer= fPageTable.length-1;
 			fTableIsFull= false;
-		}
-		else {
+		} else {
 			fPointer--;
 		}
 		chunk.fCacheIndex= -1;
@@ -121,9 +118,8 @@ public final class ChunkCache {
 			fTableIsFull= false;
 			fPointer= oldLength;
 			fPageTable= newTable;
-		}
-		else {
-			for (int i=newLength; i<oldLength; i++) {
+		} else {
+			for (int i= newLength; i < oldLength; i++) {
 				final Chunk chunk= fPageTable[i];
 				chunk.fDatabase.releaseChunk(chunk);
 				chunk.fCacheIndex= -1;
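The field changes in Chunk and ChunkCache (fCacheHitFlag= false to fCacheHitFlag, fPointer= 0 to fPointer, and so on) are safe because Java already zero-initializes instance fields, so the explicit initializers were redundant. A tiny demonstration:

// Java guarantees default values for instance fields: false, 0, null.
// Dropping "= false" / "= 0" therefore does not change behaviour.
public class DefaultFieldValues {
	boolean flag;      // defaults to false
	int counter;       // defaults to 0
	Object reference;  // defaults to null

	public static void main(String[] args) {
		DefaultFieldValues d = new DefaultFieldValues();
		System.out.println(d.flag + " " + d.counter + " " + d.reference); // false 0 null
	}
}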
@@ -29,7 +29,6 @@ import org.eclipse.core.runtime.IStatus;
 import org.eclipse.core.runtime.Status;
 import org.eclipse.osgi.util.NLS;

-
 /**
 * Database encapsulates access to a flat binary format file with a memory-manager-like API for
 * obtaining and releasing areas of storage (memory).
@@ -126,8 +125,7 @@ public class Database {
 			fVersion= version;
 			fChunks= new Chunk[1];
 			fChunksUsed = fChunksAllocated = fChunks.length;
-		}
-		else {
+		} else {
 			fHeaderChunk.read();
 			fVersion= fHeaderChunk.getInt(VERSION_OFFSET);
 			fChunks = new Chunk[nChunksOnDisk]; // chunk[0] is unused.
@@ -148,8 +146,7 @@ public class Database {
 			try {
 				fFile.getChannel().read(buf, position);
 				return;
-			}
-			catch (ClosedChannelException e) {
+			} catch (ClosedChannelException e) {
 				// bug 219834 file may have be closed by interrupting a thread during an I/O operation.
 				reopen(e, ++retries);
 			}
@@ -158,16 +155,15 @@ public class Database {

 	void write(ByteBuffer buf, long position) throws IOException {
 		int retries= 0;
-		do {
+		while (true) {
 			try {
 				fFile.getChannel().write(buf, position);
 				return;
-			}
-			catch (ClosedChannelException e) {
+			} catch (ClosedChannelException e) {
 				// bug 219834 file may have be closed by interrupting a thread during an I/O operation.
 				reopen(e, ++retries);
 			}
-		} while(true);
+		}
 	}

 	private void reopen(ClosedChannelException e, int attempt) throws ClosedChannelException, FileNotFoundException {
@@ -178,7 +174,6 @@ public class Database {
 		openFile();
 	}

-
 	public void transferTo(FileChannel target) throws IOException {
 		assert fLocked;
 		final FileChannel from= fFile.getChannel();
@@ -222,8 +217,7 @@ public class Database {
 		try {
 			fHeaderChunk.flush(); // zero out header chunk
 			fFile.getChannel().truncate(CHUNK_SIZE); // truncate database
-		}
-		catch (IOException e) {
+		} catch (IOException e) {
 			CCorePlugin.log(e);
 		}
 		malloced = freed = 0;
@@ -236,17 +230,17 @@ public class Database {
 		 * 8388608 for a file starting at 32G
 		 *
 		 */
-		long setasideChunks = Long.getLong("org.eclipse.cdt.core.parser.pdom.dense.recptr.setaside.chunks", 0 ); //$NON-NLS-1$
-		if( setasideChunks != 0 ) {
-			setVersion( getVersion() );
-			createNewChunks( (int) setasideChunks );
+		long setasideChunks = Long.getLong("org.eclipse.cdt.core.parser.pdom.dense.recptr.setaside.chunks", 0); //$NON-NLS-1$
+		if (setasideChunks != 0) {
+			setVersion(getVersion());
+			createNewChunks((int) setasideChunks);
 			flush();
 		}
 	}

 	private void removeChunksFromCache() {
 		synchronized (fCache) {
-			for (int i=1; i < fChunks.length; i++) {
+			for (int i= 1; i < fChunks.length; i++) {
 				Chunk chunk= fChunks[i];
 				if (chunk != null) {
 					fCache.remove(chunk);
@@ -256,7 +250,6 @@ public class Database {
 		}
 	}

-
 	/**
 	 * Return the Chunk that contains the given offset.
 	 * @throws CoreException
@@ -268,7 +261,7 @@ public class Database {
 		long long_index = offset / CHUNK_SIZE;
 		assert long_index < Integer.MAX_VALUE;

-		synchronized(fCache) {
+		synchronized (fCache) {
 			assert fLocked;
 			final int index = (int)long_index;
 			Chunk chunk= fChunks[index];
@@ -276,8 +269,7 @@ public class Database {
 				cacheMisses++;
 				chunk = fChunks[index] = new Chunk(this, index);
 				chunk.read();
-			}
-			else {
+			} else {
 				cacheHits++;
 			}
 			fCache.add(chunk, fExclusiveLock);
@@ -359,7 +351,7 @@ public class Database {
 		long address = (long) newChunkIndex * CHUNK_SIZE;

 		/*
-		 * non-dense pointers are at most 31 bits dense pointers are at most 35 bits Check the sizes here
+		 * Non-dense pointers are at most 31 bits dense pointers are at most 35 bits Check the sizes here
 		 * and throw an exception if the address is too large. By throwing the CoreException with the
 		 * special status, the indexing operation should be stopped. This is desired since generally, once
 		 * the max size is exceeded, there are lots of errors.
@@ -375,7 +367,7 @@ public class Database {
 	}

 	/**
-	 * for testing purposes, only.
+	 * For testing purposes, only.
 	 */
 	private long createNewChunks(int numChunks) throws CoreException {
 		assert fExclusiveLock;
@@ -383,7 +375,7 @@ public class Database {
 		final int oldLen= fChunks.length;
 		Chunk[] newchunks = new Chunk[oldLen+numChunks];
 		System.arraycopy(fChunks, 0, newchunks, 0, oldLen);
-		for( int i = oldLen; i < oldLen + numChunks; i++ ) {
+		for (int i = oldLen; i < oldLen + numChunks; i++) {
 			newchunks[i]= null;
 		}
 		final Chunk chunk= new Chunk(this, oldLen + numChunks - 1);
@@ -393,7 +385,7 @@ public class Database {
 			fCache.add(chunk, true);
 			fChunksAllocated=oldLen+numChunks;
 			fChunksUsed=oldLen+numChunks;
-			return (long)(oldLen + numChunks - 1) * CHUNK_SIZE;
+			return (long) (oldLen + numChunks - 1) * CHUNK_SIZE;
 		}
 	}

@@ -411,10 +403,11 @@ public class Database {
 		assert fExclusiveLock;
 		long prevblock = chunk.getFreeRecPtr(block + BLOCK_PREV_OFFSET);
 		long nextblock = chunk.getFreeRecPtr(block + BLOCK_NEXT_OFFSET);
-		if (prevblock != 0)
+		if (prevblock != 0) {
 			putFreeRecPtr(prevblock + BLOCK_NEXT_OFFSET, nextblock);
-		else // we were the head
+		} else { // we were the head
 			setFirstBlock(blocksize, nextblock);
+		}

 		if (nextblock != 0)
 			putFreeRecPtr(nextblock + BLOCK_PREV_OFFSET, prevblock);
@@ -543,10 +536,11 @@ public class Database {
 			bytelen= 2*len;
 		}

-		if (bytelen > ShortString.MAX_BYTE_LENGTH)
+		if (bytelen > ShortString.MAX_BYTE_LENGTH) {
 			return new LongString(this, chars, useBytes);
-		else
+		} else {
 			return new ShortString(this, chars, useBytes);
+		}
 	}

 	private boolean useBytes(char[] chars) {
@@ -557,11 +551,9 @@ public class Database {
 		return true;
 	}

-
-
 	public IString getString(long offset) throws CoreException {
 		final int l = getInt(offset);
-		int bytelen= l<0 ? -l : 2*l;
+		int bytelen= l < 0 ? -l : 2 * l;
 		if (bytelen > ShortString.MAX_BYTE_LENGTH) {
 			return new LongString(this, offset);
 		}
@@ -661,24 +653,20 @@ public class Database {
 				// locked chunk that has been removed from cache.
 				if (chunk.fDirty) {
 					dirtyChunks.add(chunk); // keep in fChunks until it is flushed.
-				}
-				else {
+				} else {
 					chunk.fLocked= false;
 					fChunks[i]= null;
 				}
-			}
-			else if (chunk.fLocked) {
+			} else if (chunk.fLocked) {
 				// locked chunk, still in cache.
 				if (chunk.fDirty) {
 					if (flush) {
 						dirtyChunks.add(chunk);
 					}
-				}
-				else {
+				} else {
 					chunk.fLocked= false;
 				}
-			}
-			else {
+			} else {
 				assert !chunk.fDirty; // dirty chunks must be locked.
 			}
 		}
@@ -698,8 +686,7 @@ public class Database {
 		if (fExclusiveLock) {
 			try {
 				giveUpExclusiveLock(true);
-			}
-			finally {
+			} finally {
 				setExclusiveLock();
 			}
 			return;
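The Database.write() hunk above replaces a do { ... } while(true) with while (true) { ... } but keeps the same retry behaviour: attempt the I/O, and on ClosedChannelException reopen the file and loop. A compact sketch of that pattern with the I/O and reopen steps abstracted into hypothetical hooks (not the real Database members):

import java.io.IOException;
import java.nio.channels.ClosedChannelException;

// Sketch of the retry-on-ClosedChannelException loop used by Database.read/write.
public abstract class RetryingIo {
	void writeWithRetry() throws IOException {
		int retries = 0;
		while (true) {
			try {
				attemptIo();
				return; // success ends the loop
			} catch (ClosedChannelException e) {
				// The channel can be closed when a thread is interrupted during I/O;
				// reopen and try again (the real code gives up after a few attempts).
				reopen(e, ++retries);
			}
		}
	}

	protected abstract void attemptIo() throws IOException;

	protected abstract void reopen(ClosedChannelException e, int attempt) throws IOException;
}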
@@ -6,7 +6,7 @@
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
-* QNX - Initial API and implementation
+* QNX - Initial API and implementation
 *******************************************************************************/
 package org.eclipse.cdt.internal.core.pdom.db;

@@ -14,13 +14,10 @@ import org.eclipse.core.runtime.CoreException;

 /**
 * @author Doug Schaefer
-*
 */
 public interface IBTreeComparator {
-
 	/**
 	 * Compare two records. Used for insert.
 	 */
 	public abstract int compare(long record1, long record2) throws CoreException;
-
 }
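IBTreeComparator.compare() orders two records given only their record pointers. As an illustration of that contract, here is a hypothetical comparator that orders records by an int key read at a fixed offset; the offset and the minimal RecordStore interface are invented for the example and do not reflect the real PDOM layout:

// Sketch of an IBTreeComparator-style comparator: records are long pointers into a
// database and are ordered by an int key at a fixed, made-up offset.
class IntKeyComparator {
	/** Minimal stand-in for the PDOM Database API a real comparator would use. */
	interface RecordStore {
		int getInt(long offset);
	}

	private static final int KEY_OFFSET = 4; // illustrative record layout only
	private final RecordStore db;

	IntKeyComparator(RecordStore db) {
		this.db = db;
	}

	/** Same contract as IBTreeComparator.compare: negative, zero or positive. */
	int compare(long record1, long record2) {
		return Integer.compare(db.getInt(record1 + KEY_OFFSET), db.getInt(record2 + KEY_OFFSET));
	}
}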
@@ -53,8 +53,8 @@ public class PDOMInclude implements IIndexFragmentInclude {
 	// Cached fields
 	private String fName;

-	public PDOMInclude(PDOMLinkage pdom, long record) {
-		this.linkage = pdom;
+	public PDOMInclude(PDOMLinkage linkage, long record) {
+		this.linkage = linkage;
 		this.record = record;
 	}
