Mirror of https://github.com/eclipse-cdt/cdt (synced 2025-06-07 09:46:02 +02:00)
Cosmetics.
parent 5c26443c41
commit 83d83c5165
5 changed files with 183 additions and 192 deletions
@@ -56,7 +56,7 @@ public class DBTest extends BaseTestCase {
 	@Override
 	protected void tearDown() throws Exception {
 		db.close();
-		if(!db.getLocation().delete()) {
+		if (!db.getLocation().delete()) {
 			db.getLocation().deleteOnExit();
 		}
 		db= null;
@@ -87,15 +87,15 @@ public class DBTest extends BaseTestCase {
 			try {
 				new Database(tmp, ChunkCache.getSharedInstance(), 0, false);
 				fail("A readonly file should not be openable with write-access");
-			} catch(CoreException ioe) {
+			} catch (CoreException e) {
 				// we expect to get a failure here
 			}

 			/* check opening a readonly file for read access does not fail */
 			try {
 				new Database(tmp, ChunkCache.getSharedInstance(), 0, true);
-			} catch(CoreException ce) {
-				fail("A readonly file should be readable by a permanently readonly database "+ce);
+			} catch (CoreException e) {
+				fail("A readonly file should be readable by a permanently readonly database "+e);
 			}
 		} finally {
 			tmp.delete(); // this may be pointless on some platforms
@@ -104,7 +104,7 @@ public class DBTest extends BaseTestCase {

 	public void testFreeBlockLinking() throws Exception {
 		final int realsize = 42;
-		final int deltas = (realsize+Database.BLOCK_HEADER_SIZE + Database.BLOCK_SIZE_DELTA - 1) / Database.BLOCK_SIZE_DELTA;
+		final int deltas = (realsize + Database.BLOCK_HEADER_SIZE + Database.BLOCK_SIZE_DELTA - 1) / Database.BLOCK_SIZE_DELTA;
 		final int blocksize = deltas * Database.BLOCK_SIZE_DELTA;
 		final int freeDeltas= Database.MIN_BLOCK_DELTAS-deltas;

@@ -150,7 +150,6 @@ public class DBTest extends BaseTestCase {
 		public long getRecord() {
 			return record;
 		}
-
 	}

 	public void testStringsInBTree() throws Exception {
@@ -194,6 +193,7 @@ public class DBTest extends BaseTestCase {
 				return string1.compare(string2, true);
 			}
 		};
+
 		BTree btree = new BTree(db, Database.DATA_AREA, comparator);
 		for (int i = 0; i < names.length; ++i) {
 			String name = names[i];
@@ -224,8 +224,8 @@ public class DBTest extends BaseTestCase {
 		assertCMP("", EQ, "", true);
 		assertCMP("", EQ, "", false);

-		doTrials(1000, 1, ShortString.MAX_BYTE_LENGTH/2, r, true);
-		doTrials(1000, 1, ShortString.MAX_BYTE_LENGTH/2, r, false);
+		doTrials(1000, 1, ShortString.MAX_BYTE_LENGTH / 2, r, true);
+		doTrials(1000, 1, ShortString.MAX_BYTE_LENGTH / 2, r, false);
 		doTrials(1000, 1, ShortString.MAX_BYTE_LENGTH, r, true);
 		doTrials(1000, 1, ShortString.MAX_BYTE_LENGTH, r, false);

@@ -243,13 +243,13 @@ public class DBTest extends BaseTestCase {

 	public void testLongStringComparison() throws CoreException {
 		Random r= new Random(314159265);
-		doTrials(100, ShortString.MAX_BYTE_LENGTH+1, ShortString.MAX_BYTE_LENGTH*2, r, true);
-		doTrials(100, ShortString.MAX_BYTE_LENGTH+1, ShortString.MAX_BYTE_LENGTH*2, r, false);
+		doTrials(100, ShortString.MAX_BYTE_LENGTH + 1, ShortString.MAX_BYTE_LENGTH * 2, r, true);
+		doTrials(100, ShortString.MAX_BYTE_LENGTH + 1, ShortString.MAX_BYTE_LENGTH * 2, r, false);
 	}

 	private void doTrials(int n, int min, int max, Random r, boolean caseSensitive) throws CoreException {
 		long start = System.currentTimeMillis();
-		for(int i=0; i<n; i++) {
+		for(int i= 0; i < n; i++) {
 			String a = randomString(min, max, r);
 			String b = randomString(min, max, r);
 			int expected = caseSensitive ? a.compareTo(b) : a.compareToIgnoreCase(b);
@@ -262,7 +262,7 @@ public class DBTest extends BaseTestCase {
 	private String randomString(int min, int max, Random r) {
 		StringBuffer result = new StringBuffer();
 		int len = min + r.nextInt(max-min);
-		for(int i=0; i<len; i++) {
+		for(int i= 0; i < len; i++) {
 			result.append(randomChar(r));
 		}
 		return result.toString();
@@ -317,8 +317,8 @@ public class DBTest extends BaseTestCase {
 	}

 	private void assertSignEquals(int a, int b) {
-		a= a<0 ? -1 : (a>0 ? 1 : 0);
-		b= b<0 ? -1 : (b>0 ? 1 : 0);
+		a= a < 0 ? -1 : (a > 0 ? 1 : 0);
+		b= b < 0 ? -1 : (b > 0 ? 1 : 0);
 		assertEquals(a, b);
 	}
 }
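The only substantive expression in the testFreeBlockLinking hunk above rounds a requested payload size up to a whole number of BLOCK_SIZE_DELTA units after adding the block header. A minimal, self-contained sketch of that arithmetic follows; it is illustrative only and not part of the commit, and it assumes BLOCK_HEADER_SIZE is 2 bytes (the 2-byte size field described in the Database block-structure comment further down) and BLOCK_SIZE_DELTA is 8 (1 << BLOCK_SIZE_DELTA_BITS, visible in the Database hunks below).

// Illustrative sketch, not part of the commit.
// Assumes BLOCK_HEADER_SIZE == 2 and BLOCK_SIZE_DELTA == 8 (see lead-in).
public class BlockSizeRoundingSketch {
	static final int BLOCK_HEADER_SIZE = 2;      // assumed: the 2-byte block size field
	static final int BLOCK_SIZE_DELTA = 1 << 3;  // from BLOCK_SIZE_DELTA_BITS = 3

	public static void main(String[] args) {
		final int realsize = 42; // payload size requested by the test
		// Same rounding as the test: ceil((realsize + header) / delta) in integer arithmetic.
		final int deltas = (realsize + BLOCK_HEADER_SIZE + BLOCK_SIZE_DELTA - 1) / BLOCK_SIZE_DELTA;
		final int blocksize = deltas * BLOCK_SIZE_DELTA;
		System.out.println(deltas);    // 6
		System.out.println(blocksize); // 48 = 42 payload bytes + 2 header bytes, rounded up to a multiple of 8
	}
}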
@@ -8,7 +8,6 @@
  * Contributors:
  * IBM Corporation - initial API and implementation
  *******************************************************************************/
-
 package org.eclipse.cdt.internal.pdom.tests;

 import java.util.regex.Pattern;
@@ -32,21 +31,20 @@ import org.eclipse.core.runtime.IProgressMonitor;
 import org.eclipse.core.runtime.NullProgressMonitor;

 /**
- * Tests for verifying whether the PDOM correctly stores information about
- * C++ namespaces.
+ * Tests for verifying whether the PDOM correctly stores information about C++ namespaces.
  *
  * @author Vivian Kong
  */
 public class NamespaceTests extends PDOMTestBase {
-	protected ICProject project;
+	protected ICProject project;
 	protected PDOM pdom;
 	protected IProgressMonitor NULL_MONITOR = new NullProgressMonitor();
 	protected IndexFilter INDEX_FILTER = IndexFilter.ALL;

 	public static Test suite() {
 		return suite(NamespaceTests.class);
 	}

 	@Override
 	protected void setUp() throws Exception {
 		if (pdom == null) {
@@ -55,7 +53,7 @@ public class NamespaceTests extends PDOMTestBase {
 		}
 		pdom.acquireReadLock();
 	}

 	@Override
 	protected void tearDown() throws Exception {
 		pdom.releaseReadLock();
@@ -63,9 +61,9 @@ public class NamespaceTests extends PDOMTestBase {
 			project.getProject().delete(IResource.FORCE | IResource.ALWAYS_DELETE_PROJECT_CONTENT, new NullProgressMonitor());
 		}
 	}

 	public void testAlias() throws Exception {
-		/* Find all the namespace */
+		// Find all the namespace
 		IBinding[] namespaces = pdom.findBindings(Pattern.compile("namespace1"), false, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, namespaces.length);
 		assertTrue(namespaces[0] instanceof ICPPNamespace);
@@ -81,147 +79,136 @@ public class NamespaceTests extends PDOMTestBase {
 		assertTrue(namespaces[0] instanceof ICPPNamespace);
 		assertTrue(namespaces[0] instanceof ICPPNamespaceAlias);
 		ICPPNamespaceAlias namespaceAlias = (ICPPNamespaceAlias) namespaces[0];

-		//TODO PDOM has no alias information
+		// TODO PDOM has no alias information
 		// namespace2 and namespaceAlias should be referencing the same namespace
 		assertEquals(namespace2, namespaceAlias.getBinding());
 	}

-	public void testNested() throws Exception {
-
-		/* Find deeply nested namespace */
+	public void testNested() throws Exception {
+		// Find deeply nested namespace
 		Pattern[] patterns = {Pattern.compile("namespace1"), Pattern.compile("namespace2"), Pattern.compile("namespace3")};
 		IBinding[] namespaces = pdom.findBindings(patterns, false, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, namespaces.length);
 		assertTrue(namespaces[0] instanceof ICPPNamespace);
 	}

-	public void testMemberDefinition() throws Exception {
-
-		/* Find the definition of a member declared in a namespace */
+	public void testMemberDefinition() throws Exception {
+		// Find the definition of a member declared in a namespace
 		Pattern[] patterns = {Pattern.compile("namespace1"), Pattern.compile("namespace2"), Pattern.compile("foo")};
 		IBinding[] members = pdom.findBindings(patterns, false, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, members.length);
 		assertTrue(members[0] instanceof ICPPFunction);

 		IName[] decls = pdom.findNames(members[0], IIndex.FIND_DECLARATIONS);
 		assertEquals(1, decls.length);
 		IASTFileLocation loc = decls[0].getFileLocation();
-		assertEquals(offset("namespace.cpp", "void foo()") + 5, loc.getNodeOffset()); //character offset
+		assertEquals(offset("namespace.cpp", "void foo()") + 5, loc.getNodeOffset()); // character offset

 		IName[] defs = pdom.findNames(members[0], IIndex.FIND_DEFINITIONS);
 		assertEquals(1, defs.length);
 		loc = defs[0].getFileLocation();
-		assertEquals(offset("namespace.cpp", "::foo()") + 2, loc.getNodeOffset()); //character offset
+		assertEquals(offset("namespace.cpp", "::foo()") + 2, loc.getNodeOffset()); // character offset
 	}

-	public void testExtend() throws Exception {
-
-		/* Extending a namespace */
+	public void testExtend() throws Exception {
+		// Extending a namespace
 		IBinding[] namespaces = pdom.findBindings(Pattern.compile("ns1"), false, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, namespaces.length);
 		assertTrue(namespaces[0] instanceof ICPPNamespace);
 		ICPPNamespace namespace1 = (ICPPNamespace) namespaces[0];
 		Pattern[] patterns = {Pattern.compile("ns1"), Pattern.compile("c")};
 		IBinding[] members = pdom.findBindings(patterns, false, INDEX_FILTER, NULL_MONITOR);
-		assertEquals(1, members.length); //c was added by extending the namespace
+		assertEquals(1, members.length); // c was added by extending the namespace
 	}

 	public void testOverload() throws Exception {
-		//Function overloading in namespace
+		// Function overloading in namespace
 		Pattern[] patterns = {Pattern.compile("ns3"), Pattern.compile("blah")};
 		IBinding[] functions = pdom.findBindings(patterns, false, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, functions.length);
 		assertTrue(functions[0] instanceof ICPPFunction);
 		ICPPFunction function = (ICPPFunction) functions[0];

 		IName[] defs = pdom.findNames(function, IIndex.FIND_DEFINITIONS);
 		assertEquals(1, defs.length);
 		IASTFileLocation loc = defs[0].getFileLocation();
-		assertEquals(offset("overload.cpp","void blah(char)") + 5, loc.getNodeOffset()); //character offset
+		assertEquals(offset("overload.cpp", "void blah(char)") + 5, loc.getNodeOffset()); // character offset

 		IName[] decls = pdom.findNames(function, IIndex.FIND_DECLARATIONS_DEFINITIONS);
 		assertEquals(1, decls.length);
 		loc = decls[0].getFileLocation();
-		assertEquals(offset("overload.cpp","void blah(char)") + 5, loc.getNodeOffset()); //character offset
+		assertEquals(offset("overload.cpp", "void blah(char)") + 5, loc.getNodeOffset()); // character offset

 		IName[] refs = pdom.findNames(function, IIndex.FIND_REFERENCES);
 		assertEquals(1, refs.length);
 		loc = refs[0].getFileLocation();
-		assertEquals(offset("overload.cpp","blah('a')"), loc.getNodeOffset()); //character offset
+		assertEquals(offset("overload.cpp", "blah('a')"), loc.getNodeOffset()); // character offset
 	}

-	public void testUnnamed() throws Exception {
-		// test case for Bugzilla 162226
-		/* Unnamed Namespace */
+	public void testUnnamed_162226() throws Exception {
+		// Unnamed Namespace
 		IBinding[] functions = pdom.findBindings(Pattern.compile("function1"), true, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, functions.length);
 		assertTrue(functions[0] instanceof ICPPFunction);
 		ICPPFunction function = (ICPPFunction) functions[0];

 		IName[] defs = pdom.findNames(function, IIndex.FIND_DEFINITIONS);
 		assertEquals(1, defs.length);
 		IASTFileLocation loc = defs[0].getFileLocation();
-		assertEquals(offset("unnamed.cpp","void function1()") + 5, loc.getNodeOffset()); //character offset
+		assertEquals(offset("unnamed.cpp", "void function1()") + 5, loc.getNodeOffset()); // character offset

 		IName[] decls = pdom.findNames(function, IIndex.FIND_DECLARATIONS_DEFINITIONS);
 		assertEquals(1, decls.length);
 		loc = decls[0].getFileLocation();
-		assertEquals(offset("unnamed.cpp","void function1()") + 5, loc.getNodeOffset()); //character offset
+		assertEquals(offset("unnamed.cpp", "void function1()") + 5, loc.getNodeOffset()); // character offset

 		IName[] refs = pdom.findNames(function, IIndex.FIND_REFERENCES);
 		assertEquals(1, refs.length);
 		loc = refs[0].getFileLocation();
-		assertEquals(offset("unnamed.cpp","function1();"), loc.getNodeOffset()); //character offset
+		assertEquals(offset("unnamed.cpp", "function1();"), loc.getNodeOffset()); // character offset
 	}

-	public void testFriend() throws Exception {
-		/* Friend in namespace - function2 is not in Class1*/
-		// Bugzilla 162011
+	public void testFriend_162011() throws Exception {
+		// Friend in namespace - function2 is not in Class1
 		IBinding[] functions = pdom.findBindings(Pattern.compile("function2"), false, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, functions.length);
 		assertTrue(functions[0] instanceof ICPPFunction);
 		ICPPFunction function = (ICPPFunction) functions[0];

 		IName[] defs = pdom.findNames(function, IIndex.FIND_DEFINITIONS);
 		assertEquals(1, defs.length);
 		IASTFileLocation loc = defs[0].getFileLocation();
-		assertEquals(offset("friend.cpp","void function2(Class1){};") + 5, loc.getNodeOffset()); //character offset
+		assertEquals(offset("friend.cpp", "void function2(Class1){};") + 5, loc.getNodeOffset()); // character offset

 		IName[] decls = pdom.findNames(function, IIndex.FIND_DECLARATIONS);
 		assertEquals(1, decls.length);
 		loc = decls[0].getFileLocation();
-		assertEquals(offset("friend.cpp","friend void function2(Class1);") + 12, loc.getNodeOffset()); //character offset
+		assertEquals(offset("friend.cpp", "friend void function2(Class1);") + 12, loc.getNodeOffset()); // character offset

 		IName[] refs = pdom.findNames(function, IIndex.FIND_REFERENCES);
 		assertEquals(1, refs.length);
 		loc = refs[0].getFileLocation();
-		assertEquals(offset("friend.cpp","ns4::function2(element)") + 5, loc.getNodeOffset()); //character offset
+		assertEquals(offset("friend.cpp", "ns4::function2(element)") + 5, loc.getNodeOffset()); // character offset
 	}

 	public void testUsingDirective() throws Exception {
-		//TODO need to test for PDOM? or is it more for compiler?
+		// TODO need to test for PDOM? or is it more for compiler?
 		Pattern[] patterns = {Pattern.compile("ns4"), Pattern.compile("element")};
 		IBinding[] variables = pdom.findBindings(patterns, false, INDEX_FILTER, NULL_MONITOR);
 		assertEquals(1, variables.length);
 		assertTrue(variables[0] instanceof ICPPVariable);
 		ICPPVariable variable1 = (ICPPVariable) variables[0];

 		IName[] defs = pdom.findNames(variable1, IIndex.FIND_DEFINITIONS);
 		assertEquals(1, defs.length);
 		IASTFileLocation loc = defs[0].getFileLocation();
-		assertEquals(offset("friend.cpp","Class1 element;") + 7, loc.getNodeOffset()); //character offset
+		assertEquals(offset("friend.cpp", "Class1 element;") + 7, loc.getNodeOffset()); // character offset

 		IName[] decls = pdom.findNames(variable1, IIndex.FIND_DECLARATIONS);
 		assertEquals(0, decls.length);

 		IName[] refs = pdom.findNames(variable1, IIndex.FIND_REFERENCES);
-		assertEquals(2, refs.length);
+		assertEquals(2, refs.length);
 	}
 }
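Throughout the NamespaceTests hunks above, expected node offsets are written as offset(file, snippet) plus a small constant; the constant skips the leading characters of the snippet so the assertion lands on the name itself ("void " is 5 characters, "friend void " is 12, "Class1 " is 7, "::" is 2). A small self-contained sketch of that arithmetic follows; it is illustrative only, not part of the commit, and the offset helper below is a hypothetical stand-in for the one the tests inherit (assumed to return the index of the first occurrence of the snippet in the named source file).

// Illustrative sketch, not part of the commit.
// The offset(...) helper here is a hypothetical stand-in (see lead-in).
public class OffsetArithmeticSketch {
	// Assumed behaviour: index of the first occurrence of key in the file contents.
	static int offset(String fileContents, String key) {
		return fileContents.indexOf(key);
	}

	public static void main(String[] args) {
		String namespaceCpp = "namespace namespace1 { namespace namespace2 { void foo(); } }";
		// offset(...) points at the 'v' of "void foo()"; adding 5 skips "void ",
		// so the expected offset is where the name "foo" starts.
		int declOffset = offset(namespaceCpp, "void foo()") + 5;
		System.out.println(namespaceCpp.substring(declOffset, declOffset + 3)); // foo
	}
}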
@@ -38,34 +38,34 @@ import com.ibm.icu.text.MessageFormat;
 *
 * @author Doug Schaefer
 */
-/*
+/*
 * The file encapsulated is divided into Chunks of size CHUNK_SIZE, and a table of contents
 * mapping chunk index to chunk address is maintained. Chunk structure exists only conceptually -
 * it is not a structure that appears in the file.
- *
+ *
 * ===== The first chunk is used by Database itself for house-keeping purposes and has structure
- *
+ *
 * offset           content
 * _____________________________
 * 0                | version number
 * INT_SIZE         | pointer to head of linked list of blocks of size MIN_BLOCK_DELTAS*BLOCK_SIZE_DELTA
 * ..               | ...
- * INT_SIZE * m (1) | pointer to head of linked list of blocks of size (m+MIN_BLOCK_DELTAS) * BLOCK_SIZE_DELTA
- * DATA_AREA        | undefined (PDOM stores its own house-keeping data in this area)
- *
+ * INT_SIZE * m (1) | pointer to head of linked list of blocks of size (m+MIN_BLOCK_DELTAS) * BLOCK_SIZE_DELTA
+ * DATA_AREA        | undefined (PDOM stores its own house-keeping data in this area)
+ *
 * (1) where 2 <= m <= CHUNK_SIZE/BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS + 1
- *
+ *
 * ===== block structure
- *
+ *
 * offset           content
 * _____________________________
 * 0                | size of block (negative indicates in use, positive unused) (2 bytes)
 * PREV_OFFSET      | pointer to prev block (of same size) (only in free blocks)
 * NEXT_OFFSET      | pointer to next block (of same size) (only in free blocks)
- *
+ *
 */
 public class Database {
-	// public for tests only, you shouldn't need these
+	// Public for tests only, you shouldn't need these.
 	public static final int INT_SIZE = 4;
 	public static final int CHUNK_SIZE = 1024 * 4;
 	public static final int OFFSET_IN_CHUNK_MASK= CHUNK_SIZE-1;
@@ -73,27 +73,26 @@ public class Database {
 	public static final int BLOCK_SIZE_DELTA_BITS = 3;
 	public static final int BLOCK_SIZE_DELTA= 1 << BLOCK_SIZE_DELTA_BITS;
 	public static final int MIN_BLOCK_DELTAS = 2; // a block must at least be 2 + 2*4 bytes to link the free blocks.
-	public static final int MAX_BLOCK_DELTAS = CHUNK_SIZE / BLOCK_SIZE_DELTA;
-	public static final int MAX_MALLOC_SIZE = MAX_BLOCK_DELTAS * BLOCK_SIZE_DELTA - BLOCK_HEADER_SIZE;
-	public static final int PTR_SIZE = 4; // size of a pointer in the database in bytes
+	public static final int MAX_BLOCK_DELTAS = CHUNK_SIZE / BLOCK_SIZE_DELTA;
+	public static final int MAX_MALLOC_SIZE = MAX_BLOCK_DELTAS * BLOCK_SIZE_DELTA - BLOCK_HEADER_SIZE;
+	public static final int PTR_SIZE = 4; // size of a pointer in the database in bytes
 	public static final int TYPE_SIZE = 2 + PTR_SIZE; // size of a type in the database in bytes
 	public static final int VALUE_SIZE = TYPE_SIZE; // size of a value in the database in bytes
 	public static final int EVALUATION_SIZE = TYPE_SIZE; // size of an evaluation in the database in bytes
 	public static final int ARGUMENT_SIZE = TYPE_SIZE; // size of a template argument in the database in bytes
 	public static final long MAX_DB_SIZE= ((long) 1 << (Integer.SIZE + BLOCK_SIZE_DELTA_BITS));

 	public static final int VERSION_OFFSET = 0;
 	public static final int DATA_AREA = (CHUNK_SIZE / BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS + 2) * INT_SIZE;

 	private static final int BLOCK_PREV_OFFSET = BLOCK_HEADER_SIZE;
 	private static final int BLOCK_NEXT_OFFSET = BLOCK_HEADER_SIZE + INT_SIZE;

 	private final File fLocation;
 	private final boolean fReadOnly;
 	private RandomAccessFile fFile;
-	private boolean fExclusiveLock; // necessary for any write operation
-	private boolean fLocked; // necessary for any operation.
+	private boolean fExclusiveLock; // Necessary for any write operation.
+	private boolean fLocked; // Necessary for any operation.
 	private boolean fIsMarkedIncomplete;

 	private int fVersion;
@@ -102,15 +101,15 @@ public class Database {
 	private int fChunksUsed;
 	private int fChunksAllocated;
 	private ChunkCache fCache;

 	private long malloced;
 	private long freed;
 	private long cacheHits;
 	private long cacheMisses;

 	/**
 	 * Construct a new Database object, creating a backing file if necessary.
-	 * @param location the local file path for the database
+	 * @param location the local file path for the database
 	 * @param cache the cache to be used optimization
 	 * @param version the version number to store in the database (only applicable for new databases)
 	 * @param openReadOnly whether this Database object will ever need writing to
@@ -122,10 +121,10 @@ public class Database {
 		fReadOnly= openReadOnly;
 		fCache= cache;
 		openFile();

 		int nChunksOnDisk = (int) (fFile.length() / CHUNK_SIZE);
 		fHeaderChunk= new Chunk(this, 0);
-		fHeaderChunk.fLocked= true; // never makes it into the cache, needed to satisfy assertions
+		fHeaderChunk.fLocked= true; // Never makes it into the cache, needed to satisfy assertions.
 		if (nChunksOnDisk <= 0) {
 			fVersion= version;
 			fChunks= new Chunk[1];
@@ -140,7 +139,7 @@ public class Database {
 			throw new CoreException(new DBStatus(e));
 		}
 	}

 	private void openFile() throws FileNotFoundException {
 		fFile = new RandomAccessFile(fLocation, fReadOnly ? "r" : "rw"); //$NON-NLS-1$ //$NON-NLS-2$
 	}
@@ -154,7 +153,7 @@ public class Database {
 			} catch (ClosedChannelException e) {
 				// Bug 219834 file may have be closed by interrupting a thread during an I/O operation.
 				reopen(e, ++retries);
 			}
 		}
 		} while (true);
 	}
@@ -167,7 +166,7 @@ public class Database {
 			} catch (ClosedChannelException e) {
 				// Bug 219834 file may have be closed by interrupting a thread during an I/O operation.
 				reopen(e, ++retries);
 			}
 		}
 		}
 	}
@@ -188,17 +187,17 @@ public class Database {
 		while (position < size) {
 			nRead = from.transferTo(position, 4096 * 16, target);
 			if (nRead == 0) {
-				break; // Should not happen
+				break; // Should not happen.
 			} else {
 				position+= nRead;
 			}
 		}
 	}

 	public int getVersion() {
 		return fVersion;
 	}

 	public void setVersion(int version) throws CoreException {
 		assert fExclusiveLock;
 		fHeaderChunk.putInt(VERSION_OFFSET, version);
@@ -212,28 +211,28 @@ public class Database {
 	public void clear(int version) throws CoreException {
 		assert fExclusiveLock;
 		removeChunksFromCache();

 		fVersion= version;
-		// clear the first chunk.
+		// Clear the first chunk.
 		fHeaderChunk.clear(0, CHUNK_SIZE);
-		// chunks have been removed from the cache, so we may just reset the array of chunks.
+		// Chunks have been removed from the cache, so we may just reset the array of chunks.
 		fChunks = new Chunk[] {null};
 		fChunksUsed = fChunksAllocated = fChunks.length;
 		try {
-			fHeaderChunk.flush(); // zero out header chunk
-			fFile.getChannel().truncate(CHUNK_SIZE); // truncate database
+			fHeaderChunk.flush(); // Zero out header chunk.
+			fFile.getChannel().truncate(CHUNK_SIZE); // Truncate database.
 		} catch (IOException e) {
 			CCorePlugin.log(e);
 		}
 		malloced = freed = 0;
 		/*
-		 * This is for debugging purposes in order to simulate having a very large PDOM database.
+		 * This is for debugging purposes in order to simulate having a very large PDOM database.
 		 * This will set aside the specified number of chunks.
 		 * Nothing uses these chunks so subsequent allocations come after these fillers.
 		 * The special function createNewChunks allocates all of these chunks at once.
 		 * 524288 for a file starting at 2G
 		 * 8388608 for a file starting at 32G
-		 *
+		 *
 		 */
 		long setasideChunks = Long.getLong("org.eclipse.cdt.core.parser.pdom.dense.recptr.setaside.chunks", 0); //$NON-NLS-1$
 		if (setasideChunks != 0) {
@@ -254,17 +253,17 @@ public class Database {
 			}
 		}
 	}

 	/**
 	 * Return the Chunk that contains the given offset.
-	 * @throws CoreException
+	 * @throws CoreException
 	 */
 	public Chunk getChunk(long offset) throws CoreException {
 		if (offset < CHUNK_SIZE) {
 			return fHeaderChunk;
 		}
 		long long_index = offset / CHUNK_SIZE;
-		assert long_index < Integer.MAX_VALUE;
+		assert long_index < Integer.MAX_VALUE;

 		synchronized (fCache) {
 			assert fLocked;
@@ -293,17 +292,17 @@ public class Database {

 	/**
 	 * Allocate a block out of the database.
-	 */
+	 */
 	public long malloc(final int datasize) throws CoreException {
 		assert fExclusiveLock;
-		assert datasize >=0 && datasize <= MAX_MALLOC_SIZE;
+		assert datasize >= 0 && datasize <= MAX_MALLOC_SIZE;

 		int needDeltas= (datasize + BLOCK_HEADER_SIZE + BLOCK_SIZE_DELTA - 1) / BLOCK_SIZE_DELTA;
 		if (needDeltas < MIN_BLOCK_DELTAS) {
 			needDeltas= MIN_BLOCK_DELTAS;
 		}

-		// Which block size
+		// Which block size.
 		long freeblock = 0;
 		int useDeltas;
 		for (useDeltas= needDeltas; useDeltas <= MAX_BLOCK_DELTAS; useDeltas++) {
@@ -311,11 +310,11 @@ public class Database {
 			if (freeblock != 0)
 				break;
 		}

-		// get the block
+		// Get the block.
 		Chunk chunk;
 		if (freeblock == 0) {
-			// allocate a new chunk
+			// Allocate a new chunk.
 			freeblock= createNewChunk();
 			useDeltas = MAX_BLOCK_DELTAS;
 			chunk = getChunk(freeblock);
@@ -323,25 +322,25 @@ public class Database {
 			chunk = getChunk(freeblock);
 			removeBlock(chunk, useDeltas*BLOCK_SIZE_DELTA, freeblock);
 		}

 		final int unusedDeltas = useDeltas-needDeltas;
 		if (unusedDeltas >= MIN_BLOCK_DELTAS) {
-			// Add in the unused part of our block
+			// Add in the unused part of our block.
 			addBlock(chunk, unusedDeltas*BLOCK_SIZE_DELTA, freeblock + needDeltas*BLOCK_SIZE_DELTA);
 			useDeltas= needDeltas;
 		}

-		// Make our size negative to show in use
+		// Make our size negative to show in use.
 		final int usedSize= useDeltas*BLOCK_SIZE_DELTA;
 		chunk.putShort(freeblock, (short) -usedSize);

-		// Clear out the block, lots of people are expecting this
+		// Clear out the block, lots of people are expecting this.
 		chunk.clear(freeblock + BLOCK_HEADER_SIZE, usedSize-BLOCK_HEADER_SIZE);

 		malloced+= usedSize;
 		return freeblock + BLOCK_HEADER_SIZE;
 	}

 	private long createNewChunk() throws CoreException {
 		assert fExclusiveLock;
 		synchronized (fCache) {
@@ -402,37 +401,37 @@ public class Database {
 			return (long) (oldLen + numChunks - 1) * CHUNK_SIZE;
 		}
 	}

 	private long getFirstBlock(int blocksize) throws CoreException {
 		assert fLocked;
 		return fHeaderChunk.getFreeRecPtr((blocksize/BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS + 1) * INT_SIZE);
 	}

 	private void setFirstBlock(int blocksize, long block) throws CoreException {
 		assert fExclusiveLock;
 		fHeaderChunk.putFreeRecPtr((blocksize/BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS + 1) * INT_SIZE, block);
 	}

 	private void removeBlock(Chunk chunk, int blocksize, long block) throws CoreException {
 		assert fExclusiveLock;
 		long prevblock = chunk.getFreeRecPtr(block + BLOCK_PREV_OFFSET);
 		long nextblock = chunk.getFreeRecPtr(block + BLOCK_NEXT_OFFSET);
 		if (prevblock != 0) {
 			putFreeRecPtr(prevblock + BLOCK_NEXT_OFFSET, nextblock);
-		} else { // we were the head
+		} else { // We were the head.
 			setFirstBlock(blocksize, nextblock);
 		}

 		if (nextblock != 0)
 			putFreeRecPtr(nextblock + BLOCK_PREV_OFFSET, prevblock);
 	}

 	private void addBlock(Chunk chunk, int blocksize, long block) throws CoreException {
 		assert fExclusiveLock;
 		// Mark our size
 		chunk.putShort(block, (short) blocksize);

-		// Add us to the head of the list
+		// Add us to the head of the list.
 		long prevfirst = getFirstBlock(blocksize);
 		chunk.putFreeRecPtr(block + BLOCK_PREV_OFFSET, 0);
 		chunk.putFreeRecPtr(block + BLOCK_NEXT_OFFSET, prevfirst);
@@ -440,21 +439,22 @@ public class Database {
 			putFreeRecPtr(prevfirst + BLOCK_PREV_OFFSET, block);
 		setFirstBlock(blocksize, block);
 	}

 	/**
 	 * Free an allocated block.
-	 *
+	 *
 	 * @param offset
 	 */
 	public void free(long offset) throws CoreException {
 		assert fExclusiveLock;
-		// TODO - look for opportunities to merge blocks
+		// TODO Look for opportunities to merge blocks
 		long block = offset - BLOCK_HEADER_SIZE;
 		Chunk chunk = getChunk(block);
 		int blocksize = - chunk.getShort(block);
 		if (blocksize < 0) {
-			// already freed
-			throw new CoreException(new Status(IStatus.ERROR, CCorePlugin.PLUGIN_ID, 0, "Already Freed", new Exception())); //$NON-NLS-1$
+			// Already freed.
+			throw new CoreException(new Status(IStatus.ERROR, CCorePlugin.PLUGIN_ID, 0,
+					"Already freed", new Exception())); //$NON-NLS-1$
 		}
 		addBlock(chunk, blocksize, block);
 		freed += blocksize;
@@ -463,31 +463,31 @@ public class Database {
 	public void putByte(long offset, byte value) throws CoreException {
 		getChunk(offset).putByte(offset, value);
 	}

 	public byte getByte(long offset) throws CoreException {
 		return getChunk(offset).getByte(offset);
 	}

 	public void putInt(long offset, int value) throws CoreException {
 		getChunk(offset).putInt(offset, value);
 	}

 	public int getInt(long offset) throws CoreException {
 		return getChunk(offset).getInt(offset);
 	}

 	public void putRecPtr(long offset, long value) throws CoreException {
 		getChunk(offset).putRecPtr(offset, value);
 	}

 	public long getRecPtr(long offset) throws CoreException {
 		return getChunk(offset).getRecPtr(offset);
 	}

 	private void putFreeRecPtr(long offset, long value) throws CoreException {
 		getChunk(offset).putFreeRecPtr(offset, value);
 	}

 	private long getFreeRecPtr(long offset) throws CoreException {
 		return getChunk(offset).getFreeRecPtr(offset);
 	}
@@ -495,15 +495,15 @@ public class Database {
 	public void put3ByteUnsignedInt(long offset, int value) throws CoreException {
 		getChunk(offset).put3ByteUnsignedInt(offset, value);
 	}

 	public int get3ByteUnsignedInt(long offset) throws CoreException {
 		return getChunk(offset).get3ByteUnsignedInt(offset);
 	}

 	public void putShort(long offset, short value) throws CoreException {
 		getChunk(offset).putShort(offset, value);
 	}

 	public short getShort(long offset) throws CoreException {
 		return getChunk(offset).getShort(offset);
 	}
@@ -511,7 +511,7 @@ public class Database {
 	public void putLong(long offset, long value) throws CoreException {
 		getChunk(offset).putLong(offset, value);
 	}

 	public long getLong(long offset) throws CoreException {
 		return getChunk(offset).getLong(offset);
 	}
@@ -523,7 +523,7 @@ public class Database {
 	public char getChar(long offset) throws CoreException {
 		return getChunk(offset).getChar(offset);
 	}

 	public void clearBytes(long offset, int byteCount) throws CoreException {
 		getChunk(offset).clear(offset, byteCount);
 	}
@@ -557,14 +557,14 @@ public class Database {
 		} else {
 			bytelen= 2 * len;
 		}

 		if (bytelen > ShortString.MAX_BYTE_LENGTH) {
 			return new LongString(this, chars, useBytes);
 		} else {
 			return new ShortString(this, chars, useBytes);
 		}
 	}

 	private boolean useBytes(char[] chars) {
 		for (char c : chars) {
 			if ((c & 0xff00) != 0)
@@ -581,7 +581,7 @@ public class Database {
 		}
 		return new ShortString(this, offset);
 	}

 	/**
 	 * For debugging purposes, only.
 	 */
@@ -602,19 +602,19 @@ public class Database {
 			System.out.println("Block size: " + bs + "=" + count); //$NON-NLS-1$ //$NON-NLS-2$
 		}
 	}

 	/**
-	 * Closes the database.
+	 * Closes the database.
 	 * <p>
 	 * The behavior of any further calls to the Database is undefined
-	 * @throws CoreException
+	 * @throws CoreException
 	 */
 	public void close() throws CoreException {
 		assert fExclusiveLock;
 		flush();
 		removeChunksFromCache();

-		// chunks have been removed from the cache, so we are fine
+		// Chunks have been removed from the cache, so we are fine.
 		fHeaderChunk.clear(0, CHUNK_SIZE);
 		fHeaderChunk.fDirty= false;
 		fChunks= new Chunk[] { null };
@@ -625,7 +625,7 @@ public class Database {
 			throw new CoreException(new DBStatus(e));
 		}
 	}

 	/**
 	 * This method is public for testing purposes only.
 	 */
@@ -639,7 +639,7 @@ public class Database {
 	void releaseChunk(final Chunk chunk) {
 		if (!chunk.fLocked) {
 			fChunks[chunk.fSequenceNumber]= null;
 		}
 	}
 	}

 	/**
@@ -662,7 +662,7 @@ public class Database {
 	public void setLocked(boolean val) {
 		fLocked= val;
 	}

 	public void giveUpExclusiveLock(final boolean flush) throws CoreException {
 		if (fExclusiveLock) {
 			try {
@@ -671,16 +671,16 @@ public class Database {
 				for (int i= 1; i < fChunksUsed; i++) {
 					Chunk chunk= fChunks[i];
 					if (chunk != null) {
-						if (chunk.fCacheIndex < 0) {
-							// locked chunk that has been removed from cache.
+						if (chunk.fCacheIndex < 0) {
+							// Locked chunk that has been removed from cache.
 							if (chunk.fDirty) {
-								dirtyChunks.add(chunk); // keep in fChunks until it is flushed.
+								dirtyChunks.add(chunk); // Keep in fChunks until it is flushed.
 							} else {
 								chunk.fLocked= false;
 								fChunks[i]= null;
 							}
 						} else if (chunk.fLocked) {
-							// locked chunk, still in cache.
+							// Locked chunk, still in cache.
 							if (chunk.fDirty) {
 								if (flush) {
 									dirtyChunks.add(chunk);
@@ -689,19 +689,19 @@ public class Database {
 								chunk.fLocked= false;
 							}
 						} else {
-							assert !chunk.fDirty; // dirty chunks must be locked.
+							assert !chunk.fDirty; // Dirty chunks must be locked.
 						}
 					}
 				}
 			}
-			// also handles header chunk
+			// Also handles header chunk.
 			flushAndUnlockChunks(dirtyChunks, flush);
 			} finally {
 				fExclusiveLock= false;
 			}
 		}
 	}

 	public void flush() throws CoreException {
 		assert fLocked;
 		if (fExclusiveLock) {
@@ -713,7 +713,7 @@ public class Database {
 			return;
 		}

-		// be careful as other readers may access chunks concurrently
+		// Be careful as other readers may access chunks concurrently.
 		ArrayList<Chunk> dirtyChunks= new ArrayList<Chunk>();
 		synchronized (fCache) {
 			for (int i= 1; i < fChunksUsed ; i++) {
@@ -724,7 +724,7 @@ public class Database {
 			}
 		}

-		// also handles header chunk
+		// Also handles header chunk.
 		flushAndUnlockChunks(dirtyChunks, true);
 	}
@@ -742,7 +742,7 @@ public class Database {
 			}
 		}

-		// only after the chunks are flushed we may unlock and release them.
+		// Only after the chunks are flushed we may unlock and release them.
 		synchronized (fCache) {
 			for (Chunk chunk : dirtyChunks) {
 				chunk.fLocked= false;
@@ -762,7 +762,7 @@ public class Database {
 			}
 		}
 	}

 	private void markFileIncomplete() throws CoreException {
 		if (!fIsMarkedIncomplete) {
 			fIsMarkedIncomplete= true;
@@ -778,11 +778,11 @@ public class Database {
 	public void resetCacheCounters() {
 		cacheHits= cacheMisses= 0;
 	}

 	public long getCacheHits() {
 		return cacheHits;
 	}

 	public long getCacheMisses() {
 		return cacheMisses;
 	}
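The Database class comment above describes the header chunk as a version int followed by one free-list head per block size, and malloc()/getFirstBlock() turn a requested size into an index into that table. A small self-contained sketch of that arithmetic follows, using the constants visible in this commit (INT_SIZE = 4, CHUNK_SIZE = 4096, BLOCK_SIZE_DELTA = 1 << 3, MIN_BLOCK_DELTAS = 2); BLOCK_HEADER_SIZE = 2 is an assumption based on the 2-byte size field in the block-structure comment, and the sketch is illustrative only, not part of the commit.

// Illustrative sketch, not part of the commit; BLOCK_HEADER_SIZE is assumed (see lead-in).
public class FreeListLayoutSketch {
	static final int INT_SIZE = 4;
	static final int CHUNK_SIZE = 1024 * 4;
	static final int BLOCK_SIZE_DELTA_BITS = 3;
	static final int BLOCK_SIZE_DELTA = 1 << BLOCK_SIZE_DELTA_BITS;
	static final int MIN_BLOCK_DELTAS = 2;
	static final int BLOCK_HEADER_SIZE = 2; // assumed: the 2-byte block size field

	// Same arithmetic as getFirstBlock()/setFirstBlock(): header-chunk offset of the
	// free-list head for blocks of the given size.
	static int freeListOffset(int blocksize) {
		return (blocksize / BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS + 1) * INT_SIZE;
	}

	public static void main(String[] args) {
		int datasize = 42;
		// malloc() rounds the request up to whole BLOCK_SIZE_DELTA units, header included.
		int needDeltas = (datasize + BLOCK_HEADER_SIZE + BLOCK_SIZE_DELTA - 1) / BLOCK_SIZE_DELTA;
		int blocksize = needDeltas * BLOCK_SIZE_DELTA;
		System.out.println(blocksize);                 // 48
		System.out.println(freeListOffset(blocksize)); // 20: byte offset of the free-list head for 48-byte blocks
		// MAX_DB_SIZE as declared above: 2^(32 + BLOCK_SIZE_DELTA_BITS) bytes = 32 GiB.
		System.out.println((long) 1 << (Integer.SIZE + BLOCK_SIZE_DELTA_BITS)); // 34359738368
	}
}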
@@ -60,7 +60,8 @@ public class PDOMCPPClassTemplate extends PDOMCPPClassType

 	private volatile ICPPTemplateParameter[] params; // Cached template parameters.

-	public PDOMCPPClassTemplate(PDOMCPPLinkage linkage, PDOMNode parent, ICPPClassTemplate template) throws CoreException, DOMException {
+	public PDOMCPPClassTemplate(PDOMCPPLinkage linkage, PDOMNode parent, ICPPClassTemplate template)
+			throws CoreException, DOMException {
 		super(linkage, parent, template);

 		final Database db = getDB();
@@ -127,7 +128,8 @@ public class PDOMCPPClassTemplate extends PDOMCPPClassType
 		}
 	}

-	private void updateTemplateParameters(PDOMLinkage linkage, ICPPTemplateParameter[] newParams) throws CoreException, DOMException {
+	private void updateTemplateParameters(PDOMLinkage linkage, ICPPTemplateParameter[] newParams)
+			throws CoreException, DOMException {
 		final Database db = getDB();
 		long rec= db.getRecPtr(record + PARAMETERS);
 		IPDOMCPPTemplateParameter[] allParams;
@@ -167,7 +169,7 @@ public class PDOMCPPClassTemplate extends PDOMCPPClassType

 		if (additionalPars > 0 || reorder) {
 			params= null;
-			IPDOMCPPTemplateParameter[] newAllParams= new IPDOMCPPTemplateParameter[allParams.length+additionalPars];
+			IPDOMCPPTemplateParameter[] newAllParams= new IPDOMCPPTemplateParameter[allParams.length + additionalPars];
 			for (int j = 0; j < newParamLength; j++) {
 				int idx= result[j];
 				if (idx >= 0) {
@@ -6,7 +6,7 @@
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
- * Markus Schorn - initial API and implementation
+ * Markus Schorn - initial API and implementation
 *******************************************************************************/
 package org.eclipse.cdt.internal.core.pdom.dom.cpp;

@@ -22,7 +22,7 @@ import org.eclipse.core.runtime.Assert;
 import org.eclipse.core.runtime.CoreException;

 /**
- * Collects methods to store an argument list in the database
+ * Collects methods to store an argument list in the database.
 */
 public class PDOMTemplateParameterArray {
 	/**
@@ -30,12 +30,12 @@ public class PDOMTemplateParameterArray {
 	 * @return the record by which the arguments can be referenced.
 	 */
 	public static long putArray(final Database db, IPDOMCPPTemplateParameter[] params) throws CoreException {
-		final short len= (short) Math.min(params.length, (Database.MAX_MALLOC_SIZE-2)/8);
-		final long block= db.malloc(2+8*len);
+		final short len= (short) Math.min(params.length, (Database.MAX_MALLOC_SIZE - 2) / 8);
+		final long block= db.malloc(2 + 8 * len);
 		long p= block;

-		db.putShort(p, len); p+=2;
-		for (int i=0; i<len; i++, p+=4) {
+		db.putShort(p, len); p += 2;
+		for (int i= 0; i < len; i++, p += 4) {
 			final IPDOMCPPTemplateParameter elem= params[i];
 			db.putRecPtr(p, elem == null ? 0 : elem.getRecord());
 		}
@@ -50,15 +50,15 @@ public class PDOMTemplateParameterArray {
 		final Database db= linkage.getDB();
 		final short len= db.getShort(rec);

-		Assert.isTrue(len >= 0 && len <= (Database.MAX_MALLOC_SIZE-2)/8);
+		Assert.isTrue(len >= 0 && len <= (Database.MAX_MALLOC_SIZE - 2) / 8);
 		if (len == 0) {
 			return IPDOMCPPTemplateParameter.EMPTY_ARRAY;
 		}

-		rec+=2;
+		rec += 2;
 		IPDOMCPPTemplateParameter[] result= new IPDOMCPPTemplateParameter[len];
-		for (int i=0; i<len; i++) {
-			final long nodeRec= db.getRecPtr(rec); rec+=4;
+		for (int i= 0; i < len; i++) {
+			final long nodeRec= db.getRecPtr(rec); rec += 4;
 			result[i]= nodeRec == 0 ? null : (IPDOMCPPTemplateParameter) linkage.getNode(nodeRec);
 		}
 		return result;
@@ -67,7 +67,8 @@ public class PDOMTemplateParameterArray {
 	/**
 	 * Creates template parameters in the pdom
 	 */
-	public static IPDOMCPPTemplateParameter[] createPDOMTemplateParameters(PDOMLinkage linkage, PDOMNode parent, ICPPTemplateParameter[] origParams) throws CoreException, DOMException {
+	public static IPDOMCPPTemplateParameter[] createPDOMTemplateParameters(PDOMLinkage linkage,
+			PDOMNode parent, ICPPTemplateParameter[] origParams) throws CoreException, DOMException {
 		IPDOMCPPTemplateParameter[] params= new IPDOMCPPTemplateParameter[origParams.length];
 		for (int i = 0; i < origParams.length; i++) {
 			params[i]= createPDOMTemplateParameter(linkage, parent, origParams[i]);
@@ -78,7 +79,8 @@ public class PDOMTemplateParameterArray {
 	/**
 	 * Creates a template parameter in the pdom
 	 */
-	public static IPDOMCPPTemplateParameter createPDOMTemplateParameter(PDOMLinkage linkage, PDOMNode parent, ICPPTemplateParameter origParam) throws CoreException, DOMException {
+	public static IPDOMCPPTemplateParameter createPDOMTemplateParameter(PDOMLinkage linkage,
+			PDOMNode parent, ICPPTemplateParameter origParam) throws CoreException, DOMException {
 		IPDOMCPPTemplateParameter param= null;
 		if (origParam instanceof ICPPTemplateNonTypeParameter) {
 			param= new PDOMCPPTemplateNonTypeParameter(linkage, parent, (ICPPTemplateNonTypeParameter) origParam);
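putArray() and getArray() above lay an argument list out as a 2-byte count followed by one record pointer per template parameter, clamping the count so the record fits in a single allocation. A rough derivation of that cap from the Database constants in this commit follows; it is illustrative only, not part of the commit, and BLOCK_HEADER_SIZE = 2 is an assumption.

// Illustrative sketch, not part of the commit; BLOCK_HEADER_SIZE is assumed (see lead-in).
public class TemplateParameterArrayCapSketch {
	static final int CHUNK_SIZE = 1024 * 4;
	static final int BLOCK_SIZE_DELTA = 1 << 3;
	static final int BLOCK_HEADER_SIZE = 2; // assumed
	static final int MAX_BLOCK_DELTAS = CHUNK_SIZE / BLOCK_SIZE_DELTA;
	static final int MAX_MALLOC_SIZE = MAX_BLOCK_DELTAS * BLOCK_SIZE_DELTA - BLOCK_HEADER_SIZE;

	public static void main(String[] args) {
		// putArray() clamps the stored count to (MAX_MALLOC_SIZE - 2) / 8 so that the
		// 2-byte count plus the per-parameter pointers still fit in one malloc'd block.
		int maxParams = (MAX_MALLOC_SIZE - 2) / 8;
		System.out.println(MAX_MALLOC_SIZE); // 4094
		System.out.println(maxParams);       // 511
	}
}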