Mirror of https://github.com/eclipse-cdt/cdt

Merge remote-tracking branch 'cdt/master' into sd90

Author: Andrew Gvozdev, 2012-05-07 01:12:34 -04:00
Commit: af227ede75
10 changed files with 150 additions and 170 deletions


@@ -1842,25 +1842,25 @@ public class InputType extends BuildObject implements IInputType {
     }

     /**
-     * Check if legacy scanner discovery method should be used.
+     * Check if legacy scanner discovery profiles should be used.
      */
-    private boolean isLegacyScannerDiscovery() {
-        boolean isLanguageSettingsProvidersEnabled = false;
+    private boolean useLegacyScannerDiscoveryProfiles() {
+        boolean useLegacy = true;
         ITool tool = getParent();
         if (tool != null) {
-            IBuildObject bo = tool.getParent();
-            if (bo instanceof IToolChain) {
-                IConfiguration cfg = ((IToolChain) bo).getParent();
-                if (cfg!=null) {
+            IBuildObject toolchain = tool.getParent();
+            if (toolchain instanceof IToolChain && ((IToolChain) toolchain).getDefaultLanguageSettingsProviderIds() != null) {
+                IConfiguration cfg = ((IToolChain) toolchain).getParent();
+                if (cfg != null && cfg.getDefaultLanguageSettingsProviderIds() != null) {
                     IResource rc = cfg.getOwner();
                     if (rc != null) {
                         IProject project = rc.getProject();
-                        isLanguageSettingsProvidersEnabled = ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
+                        useLegacy = !ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
                     }
                 }
             }
         }
-        return !isLanguageSettingsProvidersEnabled;
+        return useLegacy;
     }

     /**
@@ -1879,11 +1879,12 @@ public class InputType extends BuildObject implements IInputType {
     }

     public String getDiscoveryProfileIdAttribute() {
-        if (isLegacyScannerDiscovery()) {
-            return getLegacyDiscoveryProfileIdAttribute();
+        String discoveryProfileAttribute = getDiscoveryProfileIdAttributeInternal();
+        if (discoveryProfileAttribute == null && useLegacyScannerDiscoveryProfiles()) {
+            discoveryProfileAttribute = getLegacyDiscoveryProfileIdAttribute();
         }
-        return getDiscoveryProfileIdAttributeInternal();
+        return discoveryProfileAttribute;
     }

     /**


@@ -1545,19 +1545,21 @@ public class ToolChain extends HoldsOptions implements IToolChain, IMatchKeyProv
     }

     /**
-     * Check if legacy scanner discovery method should be used.
+     * Check if legacy scanner discovery profiles should be used.
      */
-    private boolean isLegacyScannerDiscovery() {
-        boolean isLanguageSettingsProvidersEnabled = false;
+    private boolean useLegacyScannerDiscoveryProfiles() {
+        boolean useLegacy = true;
+        if (getDefaultLanguageSettingsProviderIds() != null) {
             IConfiguration cfg = getParent();
-            if (cfg != null) {
+            if (cfg != null && cfg.getDefaultLanguageSettingsProviderIds() != null) {
                 IResource rc = cfg.getOwner();
                 if (rc != null) {
                     IProject project = rc.getProject();
-                    isLanguageSettingsProvidersEnabled = ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
+                    useLegacy = !ScannerDiscoveryLegacySupport.isLanguageSettingsProvidersFunctionalityEnabled(project);
                 }
             }
-        return !isLanguageSettingsProvidersEnabled;
+        }
+        return useLegacy;
     }

     /**
@@ -1582,11 +1584,12 @@ public class ToolChain extends HoldsOptions implements IToolChain, IMatchKeyProv
     @Override
     public String getScannerConfigDiscoveryProfileId() {
-        if (isLegacyScannerDiscovery()) {
-            return getLegacyScannerConfigDiscoveryProfileId();
+        String discoveryProfileId = getScannerConfigDiscoveryProfileIdInternal();
+        if (discoveryProfileId == null && useLegacyScannerDiscoveryProfiles()) {
+            discoveryProfileId = getLegacyScannerConfigDiscoveryProfileId();
         }
-        return getScannerConfigDiscoveryProfileIdInternal();
+        return discoveryProfileId;
     }

     /**
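Both the InputType and ToolChain hunks introduce the same resolution order: the new-style discovery profile id is read first, and the legacy id is consulted only when that attribute is absent and the project still uses legacy scanner discovery (previously the legacy id was returned whenever legacy discovery applied, even if the attribute was set). A minimal standalone sketch of that fallback, with made-up names standing in for the CDT methods:

// Sketch only: resolve() mirrors the shape of getDiscoveryProfileIdAttribute()
// and getScannerConfigDiscoveryProfileId() after this change; the names below
// are illustrative, not CDT API.
final class DiscoveryProfileFallback {
    static String resolve(String internalId, boolean useLegacyDiscovery, String legacyId) {
        String id = internalId;            // the new-style attribute wins when present
        if (id == null && useLegacyDiscovery) {
            id = legacyId;                 // legacy value is only a fallback
        }
        return id;
    }

    public static void main(String[] args) {
        System.out.println(resolve("profile.new", true, "profile.legacy"));   // profile.new
        System.out.println(resolve(null, true, "profile.legacy"));            // profile.legacy
        System.out.println(resolve(null, false, "profile.legacy"));           // null
    }
}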


@@ -533,10 +533,7 @@ public class ConfigurationDataProvider extends CConfigurationDataProvider implem
                 }
                 providers.add(provider);
             }
-        }
-        // AG TODO - should it be when empty or when ids==null?
-        if (providers.isEmpty()) {
+        } else {
             providers = ScannerDiscoveryLegacySupport.getDefaultProvidersLegacy();
         }
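The deleted TODO asked whether the legacy fallback should apply when the resolved provider list ends up empty or when the id list is null; the new else branch settles it: the legacy defaults are used only when no provider ids are specified at all. A small sketch of that decision with hypothetical names (the real code builds ILanguageSettingsProvider objects, not strings):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Sketch only: an explicitly configured but empty id list does NOT fall back
// to the legacy defaults; only a null (unspecified) list does.
final class ProviderFallback {
    static List<String> resolve(List<String> providerIds, List<String> legacyDefaults) {
        List<String> providers;
        if (providerIds != null) {
            providers = new ArrayList<>();
            for (String id : providerIds) {
                providers.add("provider:" + id);
            }
        } else {
            providers = legacyDefaults;
        }
        return providers;
    }

    public static void main(String[] args) {
        List<String> legacy = Collections.singletonList("legacy-provider");
        System.out.println(resolve(null, legacy));                     // [legacy-provider]
        System.out.println(resolve(Collections.emptyList(), legacy));  // []
    }
}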


@@ -1923,10 +1923,11 @@
          <toolChain
                archList="all"
-               osList="macosx"
+               id="cdt.managedbuild.toolchain.gnu.macosx.base"
+               languageSettingsProviders="org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser;org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector"
                name="%ToolChainName.Macosx"
-               targetTool="cdt.managedbuild.tool.macosx.c.linker.macosx.base;cdt.managedbuild.tool.macosx.cpp.linker.macosx.base;cdt.managedbuild.tool.gnu.archiver"
-               id="cdt.managedbuild.toolchain.gnu.macosx.base">
+               osList="macosx"
+               targetTool="cdt.managedbuild.tool.macosx.c.linker.macosx.base;cdt.managedbuild.tool.macosx.cpp.linker.macosx.base;cdt.managedbuild.tool.gnu.archiver">
             <targetPlatform
                   id="cdt.managedbuild.target.gnu.platform.macosx.base"
                   name="%PlatformName.Dbg"
@@ -2053,9 +2054,10 @@
          <toolChain
                archList="all"
-               osList="solaris"
                id="cdt.managedbuild.toolchain.gnu.solaris.base"
+               languageSettingsProviders="org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser;org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector"
                name="%ToolChainName.Solaris"
+               osList="solaris"
                superClass="cdt.managedbuild.toolchain.gnu.base">
             <targetPlatform
                   id="cdt.managedbuild.target.gnu.solaris.base"
@@ -3256,9 +3258,9 @@
       </projectType>
       <configuration
-            id="cdt.managedbuild.config.gnu.macosx.base"
             cleanCommand="rm -rf"
-            >
+            id="cdt.managedbuild.config.gnu.macosx.base"
+            languageSettingsProviders="org.eclipse.cdt.ui.UserLanguageSettingsProvider;org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider;${Toolchain};-org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser">
          <enablement type="CONTAINER_ATTRIBUTE"
             attribute="artifactExtension"
             value="dylib"
@@ -3669,8 +3671,9 @@
       </projectType>
       <configuration
+            cleanCommand="rm -rf"
             id="cdt.managedbuild.config.gnu.solaris.base"
-            cleanCommand="rm -rf">
+            languageSettingsProviders="org.eclipse.cdt.ui.UserLanguageSettingsProvider;org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider;${Toolchain};-org.eclipse.cdt.managedbuilder.core.GCCBuildCommandParser">
          <enablement type="CONTAINER_ATTRIBUTE"
             attribute="artifactExtension"
             value="so"


@@ -23,5 +23,4 @@ public interface IPDOM extends IIndexFragment {
     void addListener(PDOM.IListener listener);

     void removeListener(PDOM.IListener indexView);
-
 }


@@ -26,9 +26,9 @@ final class Chunk {
     final Database fDatabase;
     final int fSequenceNumber;
-    boolean fCacheHitFlag= false;
-    boolean fDirty= false;
-    boolean fLocked= false; // locked chunks must not be released from cache.
+    boolean fCacheHitFlag;
+    boolean fDirty;
+    boolean fLocked; // locked chunks must not be released from cache.
     int fCacheIndex= -1;

     Chunk(Database db, int sequenceNumber) {
@@ -54,6 +54,7 @@ final class Chunk {
         }
         fDirty= false;
     }
+
     private static int recPtrToIndex(final long offset) {
         return (int) (offset & Database.OFFSET_IN_CHUNK_MASK);
     }
@@ -94,7 +95,6 @@ final class Chunk {
         buffer[++idx]= (byte) (value);
     }

-
     public int getInt(final long offset) {
         return getInt(fBuffer, recPtrToIndex(offset));
     }
@@ -106,7 +106,6 @@ final class Chunk {
                 ((buffer[++idx] & 0xff) << 0);
     }

-
     /**
      * A free Record Pointer is a pointer to a raw block, i.e. the
      * pointer is not moved past the BLOCK_HEADER_SIZE.
@@ -119,8 +118,8 @@ final class Chunk {
     }

     /**
-     * A free Record Pointer is a pointer to a raw block, i.e. the
-     * pointer is not moved past the BLOCK_HEADER_SIZE.
+     * A free Record Pointer is a pointer to a raw block,
+     * i.e. the pointer is not moved past the BLOCK_HEADER_SIZE.
      */
     private static long expandToFreeRecPtr(int value) {
         /*
@@ -134,7 +133,6 @@ final class Chunk {
         return address << Database.BLOCK_SIZE_DELTA_BITS;
     }

-
     /**
      * A Record Pointer is a pointer as returned by Database.malloc().
      * This is a pointer to a block + BLOCK_HEADER_SIZE.
@@ -165,10 +163,9 @@ final class Chunk {
         putRecPtr(value, fBuffer, idx);
     }

-
     /**
-     * A free Record Pointer is a pointer to a raw block, i.e. the
-     * pointer is not moved past the BLOCK_HEADER_SIZE.
+     * A free Record Pointer is a pointer to a raw block,
+     * i.e. the pointer is not moved past the BLOCK_HEADER_SIZE.
      */
     public void putFreeRecPtr(final long offset, final long value) {
         assert fLocked;


@@ -8,15 +8,14 @@
  * Contributors:
  *     Markus Schorn - initial API and implementation
  *******************************************************************************/
-
 package org.eclipse.cdt.internal.core.pdom.db;

 public final class ChunkCache {
     private static ChunkCache sSharedInstance= new ChunkCache();

     private Chunk[] fPageTable;
-    private boolean fTableIsFull= false;
-    private int fPointer= 0;
+    private boolean fTableIsFull;
+    private int fPointer;

     public static ChunkCache getSharedInstance() {
         return sSharedInstance;
@@ -42,8 +41,7 @@ public final class ChunkCache {
             evictChunk();
             chunk.fCacheIndex= fPointer;
             fPageTable[fPointer]= chunk;
-        }
-        else {
+        } else {
             chunk.fCacheIndex= fPointer;
             fPageTable[fPointer]= chunk;
@@ -88,8 +86,7 @@ public final class ChunkCache {
         if (fTableIsFull) {
             fPointer= fPageTable.length-1;
             fTableIsFull= false;
-        }
-        else {
+        } else {
             fPointer--;
         }
         chunk.fCacheIndex= -1;
@@ -121,8 +118,7 @@ public final class ChunkCache {
             fTableIsFull= false;
             fPointer= oldLength;
             fPageTable= newTable;
-        }
-        else {
+        } else {
             for (int i= newLength; i < oldLength; i++) {
                 final Chunk chunk= fPageTable[i];
                 chunk.fDatabase.releaseChunk(chunk);


@@ -29,7 +29,6 @@ import org.eclipse.core.runtime.IStatus;
 import org.eclipse.core.runtime.Status;
 import org.eclipse.osgi.util.NLS;

-
 /**
  * Database encapsulates access to a flat binary format file with a memory-manager-like API for
  * obtaining and releasing areas of storage (memory).
@@ -126,8 +125,7 @@ public class Database {
             fVersion= version;
             fChunks= new Chunk[1];
             fChunksUsed = fChunksAllocated = fChunks.length;
-        }
-        else {
+        } else {
             fHeaderChunk.read();
             fVersion= fHeaderChunk.getInt(VERSION_OFFSET);
             fChunks = new Chunk[nChunksOnDisk]; // chunk[0] is unused.
@@ -148,8 +146,7 @@ public class Database {
             try {
                 fFile.getChannel().read(buf, position);
                 return;
-            }
-            catch (ClosedChannelException e) {
+            } catch (ClosedChannelException e) {
                 // bug 219834 file may have be closed by interrupting a thread during an I/O operation.
                 reopen(e, ++retries);
             }
@@ -158,16 +155,15 @@ public class Database {
     void write(ByteBuffer buf, long position) throws IOException {
         int retries= 0;
-        do {
+        while (true) {
             try {
                 fFile.getChannel().write(buf, position);
                 return;
-            }
-            catch (ClosedChannelException e) {
+            } catch (ClosedChannelException e) {
                 // bug 219834 file may have be closed by interrupting a thread during an I/O operation.
                 reopen(e, ++retries);
             }
-        } while(true);
+        }
     }

     private void reopen(ClosedChannelException e, int attempt) throws ClosedChannelException, FileNotFoundException {
@@ -178,7 +174,6 @@ public class Database {
         openFile();
     }

-
     public void transferTo(FileChannel target) throws IOException {
         assert fLocked;
         final FileChannel from= fFile.getChannel();
@@ -222,8 +217,7 @@ public class Database {
         try {
             fHeaderChunk.flush(); // zero out header chunk
             fFile.getChannel().truncate(CHUNK_SIZE); // truncate database
-        }
-        catch (IOException e) {
+        } catch (IOException e) {
             CCorePlugin.log(e);
         }
         malloced = freed = 0;
@@ -256,7 +250,6 @@ public class Database {
         }
     }

-
     /**
      * Return the Chunk that contains the given offset.
      * @throws CoreException
@@ -276,8 +269,7 @@ public class Database {
                 cacheMisses++;
                 chunk = fChunks[index] = new Chunk(this, index);
                 chunk.read();
-            }
-            else {
+            } else {
                 cacheHits++;
             }
             fCache.add(chunk, fExclusiveLock);
@@ -359,7 +351,7 @@ public class Database {
             long address = (long) newChunkIndex * CHUNK_SIZE;

             /*
-             * non-dense pointers are at most 31 bits dense pointers are at most 35 bits Check the sizes here
+             * Non-dense pointers are at most 31 bits dense pointers are at most 35 bits Check the sizes here
              * and throw an exception if the address is too large. By throwing the CoreException with the
              * special status, the indexing operation should be stopped. This is desired since generally, once
              * the max size is exceeded, there are lots of errors.
@@ -375,7 +367,7 @@ public class Database {
     }

     /**
-     * for testing purposes, only.
+     * For testing purposes, only.
      */
     private long createNewChunks(int numChunks) throws CoreException {
         assert fExclusiveLock;
@@ -411,10 +403,11 @@ public class Database {
         assert fExclusiveLock;
         long prevblock = chunk.getFreeRecPtr(block + BLOCK_PREV_OFFSET);
         long nextblock = chunk.getFreeRecPtr(block + BLOCK_NEXT_OFFSET);
-        if (prevblock != 0)
+        if (prevblock != 0) {
             putFreeRecPtr(prevblock + BLOCK_NEXT_OFFSET, nextblock);
-        else // we were the head
+        } else { // we were the head
             setFirstBlock(blocksize, nextblock);
+        }

         if (nextblock != 0)
             putFreeRecPtr(nextblock + BLOCK_PREV_OFFSET, prevblock);
@@ -543,11 +536,12 @@ public class Database {
             bytelen= 2*len;
         }

-        if (bytelen > ShortString.MAX_BYTE_LENGTH)
+        if (bytelen > ShortString.MAX_BYTE_LENGTH) {
             return new LongString(this, chars, useBytes);
-        else
+        } else {
             return new ShortString(this, chars, useBytes);
+        }
     }

     private boolean useBytes(char[] chars) {
         for (char c : chars) {
@@ -557,8 +551,6 @@ public class Database {
         return true;
     }

-
-
     public IString getString(long offset) throws CoreException {
         final int l = getInt(offset);
         int bytelen= l < 0 ? -l : 2 * l;
@@ -661,24 +653,20 @@ public class Database {
                     // locked chunk that has been removed from cache.
                     if (chunk.fDirty) {
                         dirtyChunks.add(chunk); // keep in fChunks until it is flushed.
-                    }
-                    else {
+                    } else {
                         chunk.fLocked= false;
                         fChunks[i]= null;
                     }
-                }
-                else if (chunk.fLocked) {
+                } else if (chunk.fLocked) {
                     // locked chunk, still in cache.
                     if (chunk.fDirty) {
                         if (flush) {
                             dirtyChunks.add(chunk);
                         }
-                    }
-                    else {
+                    } else {
                         chunk.fLocked= false;
                     }
-                }
-                else {
+                } else {
                     assert !chunk.fDirty; // dirty chunks must be locked.
                 }
             }
@@ -698,8 +686,7 @@ public class Database {
         if (fExclusiveLock) {
             try {
                 giveUpExclusiveLock(true);
-            }
-            finally {
+            } finally {
                 setExclusiveLock();
             }
             return;
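The read() and write() hunks keep the existing retry behaviour (bug 219834: the channel can be closed when a thread is interrupted during I/O) and only change the loop and brace style. A self-contained sketch of that retry pattern, using a plain FileChannel and a hypothetical reopen() in place of the Database internals:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

// Sketch only: 'channel' and reopen() stand in for Database.fFile.getChannel()
// and Database.reopen(e, attempt); the loop shape mirrors the new write().
final class RetryingWriter {
    private final Path path;
    private FileChannel channel;

    RetryingWriter(Path path) throws IOException {
        this.path = path;
        this.channel = FileChannel.open(path, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
    }

    void write(ByteBuffer buf, long position) throws IOException {
        int retries = 0;
        while (true) {
            try {
                channel.write(buf, position);
                return;
            } catch (ClosedChannelException e) {
                // The channel may have been closed by an interrupt during I/O;
                // reopen and retry, giving up after a few attempts.
                reopen(e, ++retries);
            }
        }
    }

    private void reopen(ClosedChannelException e, int attempt) throws IOException {
        if (attempt > 5) {
            throw e;
        }
        channel = FileChannel.open(path, StandardOpenOption.WRITE);
    }
}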


@@ -14,13 +14,10 @@ import org.eclipse.core.runtime.CoreException;

 /**
  * @author Doug Schaefer
- *
  */
 public interface IBTreeComparator {
-
     /**
      * Compare two records. Used for insert.
      */
     public abstract int compare(long record1, long record2) throws CoreException;
-
 }


@@ -53,8 +53,8 @@ public class PDOMInclude implements IIndexFragmentInclude {
     // Cached fields
     private String fName;

-    public PDOMInclude(PDOMLinkage pdom, long record) {
-        this.linkage = pdom;
+    public PDOMInclude(PDOMLinkage linkage, long record) {
+        this.linkage = linkage;
         this.record = record;
     }