1
0
Fork 0
mirror of https://github.com/eclipse-cdt/cdt synced 2025-06-06 17:26:01 +02:00

Fix for 194739, safeguard against corrupt indexes.

This commit is contained in:
Markus Schorn 2007-06-29 13:25:23 +00:00
parent 05c1dee1bc
commit 22d6ac5a83
13 changed files with 363 additions and 255 deletions

View file

@ -53,7 +53,7 @@ public class BTreeTests extends BaseTestCase {
protected void init(int degree) throws Exception {
dbFile = File.createTempFile("pdomtest", "db");
db = new Database(dbFile, new ChunkCache(), 0, false);
db.setWritable();
db.setExclusiveLock();
rootRecord = Database.DATA_AREA;
comparator = new BTMockRecordComparator();
btree = new BTree(db, rootRecord, degree, comparator);

View file

@ -38,7 +38,7 @@ public class DBPropertiesTests extends BaseTestCase {
dbLoc = File.createTempFile("test", "db");
dbLoc.deleteOnExit();
db = new Database(dbLoc, new ChunkCache(), 0, false);
db.setWritable();
db.setExclusiveLock();
}
protected void tearDown() throws Exception {
@ -46,7 +46,6 @@ public class DBPropertiesTests extends BaseTestCase {
}
public void testBasic() throws CoreException {
db.setWritable();
DBProperties properties = new DBProperties(db);
Properties expected = System.getProperties();
for(Iterator i = expected.keySet().iterator(); i.hasNext(); ) {

View file

@ -37,6 +37,7 @@ public class DBTest extends BaseTestCase {
super.setUp();
db = new Database(getTestDir().append(getName()+System.currentTimeMillis()+".dat").toFile(),
new ChunkCache(), 0, false);
db.setExclusiveLock();
}
public static Test suite() {
@ -65,7 +66,6 @@ public class DBTest extends BaseTestCase {
final int realsize = 42;
final int blocksize = (realsize / Database.MIN_SIZE + 1) * Database.MIN_SIZE;
db.setWritable();
int mem = db.malloc(realsize);
assertEquals(-blocksize, db.getInt(mem - Database.INT_SIZE));
db.free(mem);
@ -102,7 +102,6 @@ public class DBTest extends BaseTestCase {
final int realsize = 42;
final int blocksize = (realsize / Database.MIN_SIZE + 1) * Database.MIN_SIZE;
db.setWritable();
int mem1 = db.malloc(realsize);
int mem2 = db.malloc(realsize);
db.free(mem1);
@ -115,7 +114,6 @@ public class DBTest extends BaseTestCase {
}
public void testSimpleAllocationLifecycle() throws Exception {
db.setWritable();
int mem1 = db.malloc(42);
db.free(mem1);
int mem2 = db.malloc(42);
@ -152,7 +150,7 @@ public class DBTest extends BaseTestCase {
File f = getTestDir().append("testStrings.dat").toFile();
f.delete();
final Database db = new Database(f, new ChunkCache(), 0, false);
db.setWritable();
db.setExclusiveLock();
String[] names = {
"ARLENE",
@ -270,12 +268,10 @@ public class DBTest extends BaseTestCase {
{
char[] acs = a.toCharArray();
char[] bcs = b.toCharArray();
db.setWritable();
IString aiss = db.newString(a);
IString biss = db.newString(b);
IString aisc = db.newString(acs);
IString bisc = db.newString(bcs);
db.setReadOnly(true);
assertSignEquals(expected, aiss.compare(bcs, caseSensitive));
assertSignEquals(expected, aiss.compare(biss, caseSensitive));

View file

@ -105,7 +105,13 @@ public class GeneratePDOMApplicationTest extends PDOMTestBase {
WritablePDOM wpdom= new WritablePDOM(target, new URIRelativeLocationConverter(baseURI), LanguageManager.getInstance().getPDOMLinkageFactoryMappings());
verifyProject1Content(wpdom);
String fid= wpdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
String fid;
wpdom.acquireReadLock();
try {
fid = wpdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
} finally {
wpdom.releaseReadLock();
}
assertNotNull(fid);
assertTrue(fid.startsWith("export")); // check for default export id
}
@ -120,13 +126,17 @@ public class GeneratePDOMApplicationTest extends PDOMTestBase {
WritablePDOM wpdom= new WritablePDOM(target, new URIRelativeLocationConverter(baseURI), LanguageManager.getInstance().getPDOMLinkageFactoryMappings());
verifyProject1Content(wpdom);
String fid= wpdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
assertNotNull(fid);
assertEquals(ACME_SDK_ID, fid); // check for custom export id
String sdkVer= wpdom.getProperty(SDK_VERSION);
assertNotNull(sdkVer);
assertEquals("4.0.1", sdkVer); // check for custom property value
wpdom.acquireReadLock();
try {
String fid = wpdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
assertNotNull(fid);
assertEquals(ACME_SDK_ID, fid); // check for custom export id
String sdkVer = wpdom.getProperty(SDK_VERSION);
assertNotNull(sdkVer);
assertEquals("4.0.1", sdkVer); // check for custom property value
} finally {
wpdom.releaseReadLock();
}
}
public void testExternalExportProjectProvider_BadCmdLine1() throws Exception {
@ -194,10 +204,14 @@ public class GeneratePDOMApplicationTest extends PDOMTestBase {
WritablePDOM wpdom= new WritablePDOM(target, new URIRelativeLocationConverter(baseURI), LanguageManager.getInstance().getPDOMLinkageFactoryMappings());
verifyProject1Content(wpdom);
String fid= wpdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
assertNotNull(fid);
assertEquals("hello.world", fid); // check for id passed on command-line
wpdom.acquireReadLock();
try {
String fid = wpdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
assertNotNull(fid);
assertEquals("hello.world", fid); // check for id passed on command-line
} finally {
wpdom.releaseReadLock();
}
assertTrue(stateCount[0] == 2);
}
@ -254,8 +268,15 @@ public class GeneratePDOMApplicationTest extends PDOMTestBase {
};
WritablePDOM wpdom= new WritablePDOM(target, new URIRelativeLocationConverter(baseURI), LanguageManager.getInstance().getPDOMLinkageFactoryMappings());
assertEquals(1, wpdom.findBindings(new char[][] {"foo".toCharArray()}, CLinkage, NPM).length);
assertEquals(0, wpdom.findBindings(new char[][] {"foo".toCharArray()}, CPPLinkage, NPM).length);
wpdom.acquireReadLock();
try {
assertEquals(1, wpdom.findBindings(new char[][] { "foo"
.toCharArray() }, CLinkage, NPM).length);
assertEquals(0, wpdom.findBindings(new char[][] { "foo"
.toCharArray() }, CPPLinkage, NPM).length);
} finally {
wpdom.releaseReadLock();
}
}
public void verifyProject1Content(WritablePDOM wpdom) throws Exception {

View file

@ -45,6 +45,7 @@ import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.content.IContentType;
@ -119,7 +120,7 @@ public class PDOMCPPBugsTest extends BaseTestCase {
String id= pdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
assertNotNull("Exported pdom ID is null", id);
String id2= ((PDOM)pdomManager.getPDOM(cproject)).getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
String id2 = getFragmentID(cproject);
assertNotNull("Project pdom ID is null", id2);
assertFalse("Project pdom ID equals export PDOM id", id2.equals(id));
@ -130,13 +131,27 @@ public class PDOMCPPBugsTest extends BaseTestCase {
assertNotNull("Exported pdom ID is null after project reindex", id3);
assertEquals("Exported pdom ID hasChanged during reindex", id, id3);
String id4= ((PDOM)pdomManager.getPDOM(cproject)).getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
String id4= getFragmentID(cproject);
assertNotNull("Reindexed project pdom ID is null", id4);
assertFalse("Reindexex project pdom ID equals exported pdom ID", id4.equals(id));
} finally {
pdom.releaseReadLock();
}
}
private String getFragmentID(final ICProject cproject) throws CoreException, InterruptedException {
PDOMManager pdomManager= CCoreInternals.getPDOMManager();
final PDOM projectPDOM = (PDOM)pdomManager.getPDOM(cproject);
String id2;
projectPDOM.acquireReadLock();
try {
id2= (projectPDOM).getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
}
finally {
projectPDOM.releaseReadLock();
}
return id2;
}
public void testInterruptingAcquireReadLock() throws Exception {
final PDOM pdom= (PDOM) CCoreInternals.getPDOMManager().getPDOM(cproject);

View file

@ -105,14 +105,20 @@ public class PDOMProviderTests extends PDOMTestBase {
}
));
IIndex index= CCorePlugin.getIndexManager().getIndex(cproject2);
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(2, bindings.length);
index.acquireReadLock();
try {
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(2, bindings.length);
}
finally {
index.releaseReadLock();
}
}
@ -178,41 +184,59 @@ public class PDOMProviderTests extends PDOMTestBase {
{
IIndex index= CCorePlugin.getIndexManager().getIndex(cproject2);
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
assertEquals(1, index.findDefinitions(bindings[0]).length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(2, bindings.length);
index.acquireReadLock();
try {
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
assertEquals(1, index.findDefinitions(bindings[0]).length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(2, bindings.length);
}
finally {
index.releaseReadLock();
}
}
{
IIndex index= CCorePlugin.getIndexManager().getIndex(cproject3);
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
assertEquals(1, index.findDefinitions(bindings[0]).length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(2, bindings.length);
index.acquireReadLock();
try {
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
assertEquals(1, index.findDefinitions(bindings[0]).length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(2, bindings.length);
}
finally {
index.releaseReadLock();
}
}
{
IIndex index= CCorePlugin.getIndexManager().getIndex(new ICProject[]{cproject2, cproject3});
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
assertEquals(1, index.findDefinitions(bindings[0]).length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(3, bindings.length);
index.acquireReadLock();
try {
IBinding[] bindings= index.findBindings("A".toCharArray(), IndexFilter.ALL, NPM);
assertEquals(1, bindings.length);
assertEquals(1, index.findDefinitions(bindings[0]).length);
bindings= index.findBindingsForPrefix("A".toCharArray(), false, new IndexFilter() {
public boolean acceptBinding(IBinding binding) {
return binding instanceof ICPPClassType;
}
}, null);
assertEquals(3, bindings.length);
} finally {
index.releaseReadLock();
}
}
}
@ -236,9 +260,9 @@ public class PDOMProviderTests extends PDOMTestBase {
wpdom.acquireWriteLock();
try {
wpdom.getDB().setVersion(1);
wpdom.close();
} finally {
wpdom.releaseWriteLock();
wpdom.close();
}
}

View file

@ -93,7 +93,7 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
// 14 - added timestamps for files (bug 149571)
// 15 - fixed offsets for pointer types and qualifier types and PDOMCPPVariable (bug 160540).
// 16 - have PDOMCPPField store type information, and PDOMCPPNamespaceAlias store what it is aliasing
// 17 - use single linked list for names in file, adds a link to enclosing defintion name.
// 17 - use single linked list for names in file, adds a link to enclosing definition name.
// 18 - distinction between c-unions and c-structs.
// 19 - alter representation of paths in the pdom (162172)
// 20 - add pointer to member types, array types, return types for functions
@ -105,13 +105,13 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
// 26 - add properties storage
// 27 - templates: classes, functions, limited nesting support, only template type parameters
// 28 - templates: class instance/specialization base classes
// 29 - includes: fixed modelling of unresolved includes (180159)
// 29 - includes: fixed modeling of unresolved includes (180159)
// 30 - templates: method/constructor templates, typedef specializations
// 31 - macros: added file locations
// 32 - support standalone function types (181936)
// 32 - support stand-alone function types (181936)
// 33 - templates: constructor instances
// 34 - fix for base classes represented by qualified names (183843)
// 35 - add scanner configuration hashcode (62366)
// 35 - add scanner configuration hash-code (62366)
// 36 - changed chunk size back to 4K (184892)
public static final int LINKAGES = Database.DATA_AREA;
@ -144,13 +144,17 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
private void loadDatabase(File dbPath, ChunkCache cache) throws CoreException {
fPath= dbPath;
final boolean lockDB= db == null || lockCount != 0;
db = new Database(fPath, cache, VERSION, isPermanentlyReadOnly());
fileIndex= null; // holds on to the database, so clear it.
db.setLocked(lockDB);
int version= db.getVersion();
if (version == VERSION) {
readLinkages();
}
db.setLocked(lockCount != 0);
}
public IIndexLocationConverter getLocationConverter() {
@ -242,32 +246,24 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
}
protected void clear() throws CoreException {
Database db = getDB();
// Clear out the database
db.clear(1);
// Zero out the File Index and Linkages
clearFileIndex();
assert lockCount < 0; // needs write-lock.
db.setVersion(VERSION);
db.putInt(PROPERTIES, 0);
db.putInt(LINKAGES, 0);
fLinkageIDCache.clear();
// Clear out the database, everything is set to zero.
getDB().clear(VERSION);
clearCaches();
}
void reloadFromFile(File file) throws CoreException {
assert lockCount < 0; // must have write lock.
File oldFile= fPath;
fLinkageIDCache.clear();
clearCaches();
try {
db.close();
} catch (CoreException e) {
CCorePlugin.log(e);
}
loadDatabase(file, db.getChunkCache());
if(!isPermanentlyReadOnly()) {
db.setWritable();
}
db.setExclusiveLock();
oldFile.delete();
}
@ -538,6 +534,7 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
--waitingReaders;
}
++lockCount;
db.setLocked(true);
}
}
@ -550,11 +547,10 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
--lockCount;
mutex.notifyAll();
clearCache= lockCount == 0;
db.setLocked(lockCount != 0);
}
if (clearCache) {
synchronized (fResultCache) {
fResultCache.clear();
}
clearResultCache();
}
}
@ -574,9 +570,10 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
* @throws IllegalStateException if this PDOM is not writable
*/
public void acquireWriteLock(int giveupReadLocks) throws InterruptedException {
assert !isPermanentlyReadOnly();
synchronized (mutex) {
if (giveupReadLocks > 0) {
// giveup on read locks
// give up on read locks
assert lockCount >= giveupReadLocks: "Not enough locks to release"; //$NON-NLS-1$
if (lockCount < giveupReadLocks) {
giveupReadLocks= lockCount;
@ -590,7 +587,7 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
while (lockCount > giveupReadLocks || waitingReaders > 0)
mutex.wait();
lockCount= -1;
db.setWritable();
db.setExclusiveLock();
}
}
@ -599,11 +596,9 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
}
public void releaseWriteLock(int establishReadLocks, boolean flush) {
synchronized(fResultCache) {
fResultCache.clear();
}
clearResultCache();
try {
db.setReadOnly(flush);
db.giveUpExclusiveLock(flush);
} catch (CoreException e) {
CCorePlugin.log(e);
}
@ -613,6 +608,7 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
if (lockCount < 0)
lockCount= establishReadLocks;
mutex.notifyAll();
db.setLocked(lockCount != 0);
}
fireChange();
}
@ -756,8 +752,20 @@ public class PDOM extends PlatformObject implements IIndexFragment, IPDOM {
}
public void close() throws CoreException {
fLinkageIDCache.clear();
db.close();
clearCaches();
}
private void clearCaches() {
fileIndex= null;
fLinkageIDCache.clear();
clearResultCache();
}
private void clearResultCache() {
synchronized (fResultCache) {
fResultCache.clear();
}
}
public long getCacheHits() {

View file

@ -61,7 +61,6 @@ import org.eclipse.cdt.internal.core.index.IndexerStateEvent;
import org.eclipse.cdt.internal.core.index.provider.IndexProviderManager;
import org.eclipse.cdt.internal.core.pdom.PDOM.IListener;
import org.eclipse.cdt.internal.core.pdom.db.ChunkCache;
import org.eclipse.cdt.internal.core.pdom.db.Database;
import org.eclipse.cdt.internal.core.pdom.dom.PDOMProjectIndexLocationConverter;
import org.eclipse.cdt.internal.core.pdom.indexer.DeltaAnalyzer;
import org.eclipse.cdt.internal.core.pdom.indexer.IndexerPreferences;
@ -1082,34 +1081,30 @@ public class PDOMManager implements IWritableIndexManager, IListener {
try {
// copy it
PDOM pdom= (PDOM) getPDOM(cproject);
pdom.acquireWriteLock();
pdom.acquireReadLock();
String oldID= null;
try {
oldID= pdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
pdom.flush();
Database db= pdom.getDB();
FileChannel from= db.getChannel();
FileChannel to = new FileOutputStream(targetLocation).getChannel();
from.transferTo(0, from.size(), to);
pdom.getDB().transferTo(to);
to.close();
} finally {
pdom.releaseWriteLock();
pdom.releaseReadLock();
}
// overwrite internal location representations
final WritablePDOM newPDOM = new WritablePDOM(targetLocation, pdom.getLocationConverter(), LanguageManager.getInstance().getPDOMLinkageFactoryMappings());
newPDOM.acquireWriteLock();
try {
newPDOM.acquireWriteLock();
try {
newPDOM.rewriteLocations(newConverter);
newPDOM.rewriteLocations(newConverter);
// ensure fragment id has a sensible value, in case callees do not
// overwrite their own values
String oldId= pdom.getProperty(IIndexFragment.PROPERTY_FRAGMENT_ID);
newPDOM.setProperty(IIndexFragment.PROPERTY_FRAGMENT_ID, "exported."+oldId); //$NON-NLS-1$
} finally {
newPDOM.releaseWriteLock();
}
} finally {
// ensure fragment id has a sensible value, in case callees do not
// overwrite their own values
newPDOM.setProperty(IIndexFragment.PROPERTY_FRAGMENT_ID, "exported."+oldID); //$NON-NLS-1$
newPDOM.close();
} finally {
newPDOM.releaseWriteLock();
}
} catch(IOException ioe) {
throw new CoreException(CCorePlugin.createStatus(ioe.getMessage()));

View file

@ -71,7 +71,7 @@ public class TeamPDOMExportOperation implements IWorkspaceRunnable {
public void run(IProgressMonitor monitor) throws CoreException {
getMessageDigest();
getTargetLocation();
File tmpPDOM= null;
File tmpChecksums= null;
try {
@ -80,15 +80,15 @@ public class TeamPDOMExportOperation implements IWorkspaceRunnable {
} catch (IOException e) {
throw new CoreException(CCorePlugin.createStatus(Messages.TeamPDOMExportOperation_errorCreatingTempFile, e));
}
try {
PDOMManager pdomManager= CCoreInternals.getPDOMManager();
// wait for indexer
monitor.beginTask("", 100); //$NON-NLS-1$
pdomManager.joinIndexer(Integer.MAX_VALUE, subMonitor(monitor, 1));
checkMonitor(monitor);
// create index
IIndexLocationConverter converter= new ResourceContainerRelativeLocationConverter(ResourcesPlugin.getWorkspace().getRoot());
pdomManager.exportProjectPDOM(fProject, tmpPDOM, converter);
@ -97,12 +97,15 @@ public class TeamPDOMExportOperation implements IWorkspaceRunnable {
// create checksums
PDOM pdom= new PDOM(tmpPDOM, converter, LanguageManager.getInstance().getPDOMLinkageFactoryMappings());
pdom.acquireReadLock();
try {
monitor.setTaskName(Messages.Checksums_taskComputeChecksums);
createChecksums(fProject, pdom, tmpChecksums, subMonitor(monitor, 94));
pdom.db.setExclusiveLock(); // The tmpPDOM is all ours.
pdom.close();
}
finally {
pdom.close();
pdom.releaseReadLock();
}
// create archive
@ -111,6 +114,9 @@ public class TeamPDOMExportOperation implements IWorkspaceRunnable {
// store preferences
monitor.setTaskName(Messages.TeamPDOMExportOperation_taskExportIndex);
IndexerPreferences.setIndexImportLocation(fProject.getProject(), fTargetLocation.toString());
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
finally {
if (tmpPDOM != null) {

View file

@ -39,7 +39,7 @@ final class Chunk {
void read() throws CoreException {
try {
final ByteBuffer buf= ByteBuffer.wrap(fBuffer);
fDatabase.getChannel().read(buf, fSequenceNumber*Database.CHUNK_SIZE);
fDatabase.getFileChannel().read(buf, fSequenceNumber*Database.CHUNK_SIZE);
} catch (IOException e) {
throw new CoreException(new DBStatus(e));
}
@ -48,7 +48,7 @@ final class Chunk {
void flush() throws CoreException {
try {
final ByteBuffer buf= ByteBuffer.wrap(fBuffer);
fDatabase.getChannel().write(buf, fSequenceNumber*Database.CHUNK_SIZE);
fDatabase.getFileChannel().write(buf, fSequenceNumber*Database.CHUNK_SIZE);
} catch (IOException e) {
throw new CoreException(new DBStatus(e));
}

View file

@ -16,6 +16,7 @@ package org.eclipse.cdt.internal.core.pdom.db;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Iterator;
@ -61,15 +62,19 @@ public class Database {
private final File location;
private final RandomAccessFile file;
private boolean fWritable= false;
private boolean fPermanentlyReadOnly;
private Chunk[] chunks;
private boolean fExclusiveLock= false; // necessary for any write operation
private boolean fLocked; // necessary for any operation.
private boolean fIsMarkedIncomplete= false;
private int fVersion;
private final Chunk fHeaderChunk;
private Chunk[] fChunks;
private ChunkCache fCache;
private long malloced;
private long freed;
private long cacheHits;
private long cacheMisses;
private ChunkCache fCache;
// public for tests only, you shouldn't need these
public static final int VERSION_OFFSET = 0;
@ -86,63 +91,69 @@ public class Database {
/**
* Construct a new Database object, creating a backing file if necessary.
* @param location the local file path for the database
* @param cache the cache to be used optimisation
* @param cache the cache to be used for optimization
* @param version the version number to store in the database (only applicable for new databases)
* @param openReadOnly whether this Database object should be opened read-only (it will never need writing to)
* @throws CoreException
*/
public Database(File location, ChunkCache cache, int version, boolean permanentlyReadOnly) throws CoreException {
public Database(File location, ChunkCache cache, int version, boolean openReadOnly) throws CoreException {
try {
this.location = location;
this.fPermanentlyReadOnly= permanentlyReadOnly;
this.file = new RandomAccessFile(location, permanentlyReadOnly ? "r" : "rw"); //$NON-NLS-1$ //$NON-NLS-2$
this.file = new RandomAccessFile(location, openReadOnly ? "r" : "rw"); //$NON-NLS-1$ //$NON-NLS-2$
this.fCache= cache;
// Allocate chunk table, make sure we have at least one
long nChunks = file.length() / CHUNK_SIZE;
chunks = new Chunk[(int)nChunks];
int nChunks = Math.max(1, (int) (file.length() / CHUNK_SIZE));
fHeaderChunk= new Chunk(this, 0);
fHeaderChunk.fLocked= true; // never makes it into the cache, needed to satisfy assertions
fChunks = new Chunk[nChunks]; // chunk[0] is unused.
if (nChunks == 0) {
if(!permanentlyReadOnly) {
setWritable();
}
createNewChunk();
setVersion(version);
setReadOnly(true);
fVersion= version;
}
else {
fHeaderChunk.read();
fVersion= fHeaderChunk.getInt(0);
}
} catch (IOException e) {
throw new CoreException(new DBStatus(e));
}
}
public FileChannel getChannel() {
FileChannel getFileChannel() {
return file.getChannel();
}
public void transferTo(FileChannel target) throws IOException {
assert fLocked;
final FileChannel from= file.getChannel();
from.transferTo(0, from.size(), target);
}
public int getVersion() throws CoreException {
return getChunk(0).getInt(0);
return fVersion;
}
public void setVersion(int version) throws CoreException {
getChunk(0).putInt(0, version);
assert fExclusiveLock;
fHeaderChunk.putInt(0, version);
fVersion= version;
}
/**
* Empty the contents of the Database, make it ready to start again
* @throws CoreException
*/
public void clear(long timeout) throws CoreException {
int version= getVersion();
public void clear(int version) throws CoreException {
assert fExclusiveLock;
removeChunksFromCache();
fVersion= version;
// clear the first chunk.
Chunk header= getChunk(0);
header.clear(0, CHUNK_SIZE);
setVersion(version);
// chunks have been removed from the cache, so we are fine here.
chunks = new Chunk[] {header};
fHeaderChunk.clear(0, CHUNK_SIZE);
// chunks have been removed from the cache, so we may just reset the array of chunks.
fChunks = new Chunk[] {null};
try {
getChannel().truncate(CHUNK_SIZE);
fHeaderChunk.flush(); // zero out header chunk
file.getChannel().truncate(CHUNK_SIZE); // truncate database
}
catch (IOException e) {
CCorePlugin.log(e);
@ -152,11 +163,11 @@ public class Database {
private void removeChunksFromCache() {
synchronized (fCache) {
for (int i = 0; i < chunks.length; i++) {
Chunk chunk= chunks[i];
for (int i=1; i < fChunks.length; i++) {
Chunk chunk= fChunks[i];
if (chunk != null) {
fCache.remove(chunk);
chunks[i]= null;
fChunks[i]= null;
}
}
}
@ -168,34 +179,22 @@ public class Database {
* @throws CoreException
*/
public Chunk getChunk(int offset) throws CoreException {
int index = offset / CHUNK_SIZE;
// for performance reasons try to find chunk and mark it without
// synchronizing. This means that we might pick up a chunk that
// has been paged out, which is fine.
// Furthermore the hit-flag may not be seen by the clock-algorithm,
// which might lead to the eviction of a chunk. With the next
// cache failure we are in sync again, though.
Chunk chunk = chunks[index];
if (chunk != null && (chunk.fLocked || !fWritable)) {
chunk.fCacheHitFlag= true;
cacheHits++;
return chunk;
if (offset < CHUNK_SIZE) {
return fHeaderChunk;
}
// here is the safe code that has to be performed if we cannot
// get hold of the chunk.
synchronized(fCache) {
chunk= chunks[index];
assert fLocked;
final int index = offset / CHUNK_SIZE;
Chunk chunk= fChunks[index];
if (chunk == null) {
cacheMisses++;
chunk = chunks[index] = new Chunk(this, index);
chunk = fChunks[index] = new Chunk(this, index);
chunk.read();
}
else {
cacheHits++;
}
fCache.add(chunk, fWritable);
fCache.add(chunk, fExclusiveLock);
return chunk;
}
}
@ -207,6 +206,7 @@ public class Database {
* @return
*/
public int malloc(int size) throws CoreException {
assert fExclusiveLock;
if (size > MAX_SIZE)
// Too Big
throw new CoreException(new Status(IStatus.ERROR, CCorePlugin.PLUGIN_ID, 0,
@ -254,31 +254,33 @@ public class Database {
}
private int createNewChunk() throws CoreException {
// prepare new chunk array
final int oldLen= chunks.length;
final Chunk chunk= new Chunk(this, oldLen);
chunk.fDirty= true;
Chunk[] newchunks = new Chunk[oldLen+1];
// the content of the chunk array may be modified by the cache, so sync it.
assert fExclusiveLock;
synchronized (fCache) {
System.arraycopy(chunks, 0, newchunks, 0, oldLen);
final int oldLen= fChunks.length;
final Chunk chunk= new Chunk(this, oldLen);
chunk.fDirty= true;
Chunk[] newchunks = new Chunk[oldLen+1];
System.arraycopy(fChunks, 0, newchunks, 0, oldLen);
newchunks[oldLen]= chunk;
chunks= newchunks;
fChunks= newchunks;
fCache.add(chunk, true);
return oldLen * CHUNK_SIZE;
}
return oldLen * CHUNK_SIZE;
}
private int getFirstBlock(int blocksize) throws CoreException {
return getChunk(0).getInt((blocksize / MIN_SIZE) * INT_SIZE);
assert fLocked;
return fHeaderChunk.getInt((blocksize / MIN_SIZE) * INT_SIZE);
}
private void setFirstBlock(int blocksize, int block) throws CoreException {
getChunk(0).putInt((blocksize / MIN_SIZE) * INT_SIZE, block);
assert fExclusiveLock;
fHeaderChunk.putInt((blocksize / MIN_SIZE) * INT_SIZE, block);
}
private void removeBlock(Chunk chunk, int blocksize, int block) throws CoreException {
assert fExclusiveLock;
int prevblock = chunk.getInt(block + PREV_OFFSET);
int nextblock = chunk.getInt(block + NEXT_OFFSET);
if (prevblock != 0)
@ -291,6 +293,7 @@ public class Database {
}
private void addBlock(Chunk chunk, int blocksize, int block) throws CoreException {
assert fExclusiveLock;
// Mark our size
chunk.putInt(block, blocksize);
@ -309,6 +312,7 @@ public class Database {
* @param offset
*/
public void free(int offset) throws CoreException {
assert fExclusiveLock;
// TODO - look for opportunities to merge blocks
int block = offset - 4;
Chunk chunk = getChunk(block);
@ -382,15 +386,14 @@ public class Database {
return new ShortString(this, offset);
}
public int getChunkCount() {
return chunks.length;
}
/**
* For debugging purposes, only.
*/
public void reportFreeBlocks() throws CoreException {
System.out.println("Allocated size: " + chunks.length * CHUNK_SIZE); //$NON-NLS-1$
System.out.println("Allocated size: " + fChunks.length * CHUNK_SIZE); //$NON-NLS-1$
System.out.println("malloc'ed: " + malloced); //$NON-NLS-1$
System.out.println("free'd: " + freed); //$NON-NLS-1$
System.out.println("wasted: " + (chunks.length * CHUNK_SIZE - (malloced - freed))); //$NON-NLS-1$
System.out.println("wasted: " + (fChunks.length * CHUNK_SIZE - (malloced - freed))); //$NON-NLS-1$
System.out.println("Free blocks"); //$NON-NLS-1$
for (int bs = MIN_SIZE; bs <= CHUNK_SIZE; bs += MIN_SIZE) {
int count = 0;
@ -412,11 +415,14 @@ public class Database {
* @throws CoreException
*/
public void close() throws CoreException {
setReadOnly(true);
assert fExclusiveLock;
flush();
removeChunksFromCache();
// chunks have been removed from the cache, so we are fine
chunks= new Chunk[0];
fHeaderChunk.clear(0, CHUNK_SIZE);
fHeaderChunk.fDirty= false;
fChunks= new Chunk[] {null};
try {
file.close();
} catch (IOException e) {
@ -436,7 +442,7 @@ public class Database {
*/
void releaseChunk(final Chunk chunk) {
if (!chunk.fLocked) {
chunks[chunk.fSequenceNumber]= null;
fChunks[chunk.fSequenceNumber]= null;
}
}
@ -449,65 +455,70 @@ public class Database {
}
/**
* Marks this Database as writable. This is used for avoiding some synchronization on chunk fetching. An
* exception is thrown if this Database was constructed as a permanently read only Database.
* @see Database#Database(File, ChunkCache, int, boolean)
* @throw IllegalStateException if called on a permanently read-only database
* Asserts that the database is used by one thread exclusively. This is necessary when doing
* write operations.
*/
public void setWritable() {
if(fPermanentlyReadOnly)
throw new IllegalStateException("A Database created as permanent-read-only may not be changed to writable state"); //$NON-NLS-1$
fWritable= true;
public void setExclusiveLock() {
fExclusiveLock= true;
fLocked= true;
}
public void setReadOnly(final boolean flush) throws CoreException {
if (fWritable) {
fWritable= false;
ArrayList dirtyChunks= new ArrayList();
synchronized (fCache) {
for (int i= chunks.length-1; i >= 0 ; i--) {
Chunk chunk= chunks[i];
if (chunk != null) {
if (chunk.fCacheIndex < 0) {
chunk.fLocked= false;
chunks[i]= null;
if (chunk.fDirty) {
dirtyChunks.add(chunk);
public void setLocked(boolean val) {
fLocked= val;
}
public void giveUpExclusiveLock(final boolean flush) throws CoreException {
if (fExclusiveLock) {
try {
ArrayList dirtyChunks= new ArrayList();
synchronized (fCache) {
for (int i= 1; i < fChunks.length; i++) {
Chunk chunk= fChunks[i];
if (chunk != null) {
if (chunk.fCacheIndex < 0) {
// locked chunk that has been removed from cache.
if (chunk.fDirty) {
dirtyChunks.add(chunk); // keep in fChunks until it is flushed.
}
else {
chunk.fLocked= false;
fChunks[i]= null;
}
}
}
else if (chunk.fLocked) {
if (!chunk.fDirty) {
chunk.fLocked= false;
else if (chunk.fLocked) {
// locked chunk, still in cache.
if (chunk.fDirty) {
if (flush) {
dirtyChunks.add(chunk);
}
}
else {
chunk.fLocked= false;
}
}
else if (flush) {
chunk.fLocked= false;
dirtyChunks.add(chunk);
else {
assert !chunk.fDirty; // dirty chunks must be locked.
}
}
else if (flush && chunk.fDirty) {
dirtyChunks.add(chunk);
}
}
}
// also handles header chunk
flushAndUnlockChunks(dirtyChunks, flush);
}
if (!dirtyChunks.isEmpty()) {
for (Iterator it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = (Chunk) it.next();
chunk.flush();
}
finally {
fExclusiveLock= false;
}
}
}
public void flush() throws CoreException {
if (fWritable) {
assert fLocked;
if (fExclusiveLock) {
try {
setReadOnly(true);
giveUpExclusiveLock(true);
}
finally {
setWritable();
setExclusiveLock();
}
return;
}
@ -515,33 +526,69 @@ public class Database {
// be careful as other readers may access chunks concurrently
ArrayList dirtyChunks= new ArrayList();
synchronized (fCache) {
for (int i= chunks.length-1; i >= 0 ; i--) {
Chunk chunk= chunks[i];
for (int i= 1; i < fChunks.length ; i++) {
Chunk chunk= fChunks[i];
if (chunk != null && chunk.fDirty) {
dirtyChunks.add(chunk);
}
}
}
if (!dirtyChunks.isEmpty()) {
for (Iterator it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = (Chunk) it.next();
chunk.flush();
// also handles header chunk
flushAndUnlockChunks(dirtyChunks, true);
}
private void flushAndUnlockChunks(final ArrayList dirtyChunks, boolean isComplete) throws CoreException {
assert !Thread.holdsLock(fCache);
synchronized(fHeaderChunk) {
if (!fHeaderChunk.fDirty) {
if (!(isComplete && fIsMarkedIncomplete)) {
return;
}
}
}
// only after the chunks are flushed we may unlock and release them.
synchronized (fCache) {
for (Iterator it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = (Chunk) it.next();
chunk.fLocked= false;
if (chunk.fCacheIndex < 0) {
chunks[chunk.fSequenceNumber]= null;
if (!dirtyChunks.isEmpty()) {
markFileIncomplete();
for (Iterator it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = (Chunk) it.next();
if (chunk.fDirty) {
chunk.flush();
}
}
// only after the chunks are flushed we may unlock and release them.
synchronized (fCache) {
for (Iterator it = dirtyChunks.iterator(); it.hasNext();) {
Chunk chunk = (Chunk) it.next();
chunk.fLocked= false;
if (chunk.fCacheIndex < 0) {
fChunks[chunk.fSequenceNumber]= null;
}
}
}
}
if (isComplete) {
if (fHeaderChunk.fDirty || fIsMarkedIncomplete) {
fHeaderChunk.putInt(0, fVersion);
fHeaderChunk.flush();
fIsMarkedIncomplete= false;
}
}
}
}
private void markFileIncomplete() throws CoreException {
if (!fIsMarkedIncomplete) {
fIsMarkedIncomplete= true;
try {
final ByteBuffer buf= ByteBuffer.wrap(new byte[4]);
file.getChannel().write(buf, 0);
} catch (IOException e) {
throw new CoreException(new DBStatus(e));
}
}
}
public void resetCacheCounters() {
cacheHits= cacheMisses= 0;
}

View file

@ -24,6 +24,7 @@ import org.eclipse.cdt.core.dom.ast.cpp.ICPPTemplateInstance;
import org.eclipse.cdt.core.index.IIndexBinding;
import org.eclipse.cdt.internal.core.index.IIndexFragment;
import org.eclipse.cdt.internal.core.index.IIndexFragmentBinding;
import org.eclipse.cdt.internal.core.index.IIndexFragmentBindingComparator;
import org.eclipse.cdt.internal.core.pdom.PDOM;
import org.eclipse.cdt.internal.core.pdom.db.Database;
import org.eclipse.cdt.internal.core.pdom.db.IString;
@ -289,11 +290,10 @@ public abstract class PDOMBinding extends PDOMNamedNode implements IIndexFragmen
* @return
*/
public int pdomCompareTo(PDOMBinding other) {
PDOMBinding otherBinding = (PDOMBinding) other;
int cmp = comparePDOMBindingQNs(this, otherBinding);
int cmp = comparePDOMBindingQNs(this, other);
if(cmp==0) {
int t1 = getNodeType();
int t2 = otherBinding.getNodeType();
int t2 = other.getNodeType();
return t1 < t2 ? -1 : (t1 > t2 ? 1 : 0);
}
return cmp;

View file

@ -69,22 +69,19 @@ public class GeneratePDOM implements ISafeRunnable {
try {
CCoreInternals.getPDOMManager().exportProjectPDOM(cproject, targetLocation, converter);
WritablePDOM exportedPDOM= new WritablePDOM(targetLocation, converter, LanguageManager.getInstance().getPDOMLinkageFactoryMappings());
exportedPDOM.acquireWriteLock(0);
try {
exportedPDOM.acquireWriteLock(0);
try {
Map exportProperties= pm.getExportProperties();
if(exportProperties!=null) {
for(Iterator i = exportProperties.entrySet().iterator(); i.hasNext(); ) {
Map.Entry entry = (Map.Entry) i.next();
exportedPDOM.setProperty((String) entry.getKey(), (String) entry.getValue());
}
Map exportProperties= pm.getExportProperties();
if(exportProperties!=null) {
for(Iterator i = exportProperties.entrySet().iterator(); i.hasNext(); ) {
Map.Entry entry = (Map.Entry) i.next();
exportedPDOM.setProperty((String) entry.getKey(), (String) entry.getValue());
}
} finally {
exportedPDOM.releaseWriteLock(0, true);
}
exportedPDOM.close();
}
finally {
exportedPDOM.close();
exportedPDOM.releaseWriteLock();
}
} catch(InterruptedException ie) {
String msg= MessageFormat.format(Messages.GeneratePDOM_GenericGenerationFailed, new Object[] {ie.getMessage()});