Mirror of https://github.com/eclipse-cdt/cdt

Follow up for bug 292908: Correct reading chunks in loop.

commit ac54f0829b (parent 164af3c7a3)
Author: Markus Schorn
Date:   2010-01-08 13:48:48 +00:00

2 changed files with 31 additions and 42 deletions
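In short: FileCharArray.readChunkData now keeps decoding until CHUNK_SIZE characters have been produced, checking dest.position() rather than the number of bytes read, returns the decoded char[] directly, and reports the end offset in the file through a one-element long[] holder. LazyCharArray.readUpTo reads that end offset from the holder and advances fileOffset before requesting the next chunk.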

FileCharArray.java

@@ -55,6 +55,17 @@ public class FileCharArray extends LazyCharArray {
         return new CharArray(buf);
     }
 
+    private static char[] extractChars(CharBuffer charBuffer) {
+        if (charBuffer.hasArray() && charBuffer.arrayOffset() == 0) {
+            char[] buf = charBuffer.array();
+            if (buf.length == charBuffer.remaining())
+                return buf;
+        }
+        char[] buf = new char[charBuffer.remaining()];
+        charBuffer.get(buf);
+        return buf;
+    }
+
     private String fFileName;
     private String fCharSet;
     private FileChannel fChannel;
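A note on the extractChars helper added above: it hands back the CharBuffer's backing array without copying only when that array lines up exactly with the buffer's remaining content, and falls back to copying otherwise. A small standalone demo of both cases (illustrative only, not part of this commit):

import java.nio.CharBuffer;

// Demonstrates the two paths of the extractChars utility: reuse of the backing
// array when it matches the remaining content exactly, and a copy otherwise.
public class ExtractCharsDemo {
    static char[] extractChars(CharBuffer charBuffer) {
        if (charBuffer.hasArray() && charBuffer.arrayOffset() == 0) {
            char[] buf = charBuffer.array();
            if (buf.length == charBuffer.remaining())
                return buf;
        }
        char[] buf = new char[charBuffer.remaining()];
        charBuffer.get(buf);
        return buf;
    }

    public static void main(String[] args) {
        char[] backing = { 'a', 'b', 'c' };
        CharBuffer full = CharBuffer.wrap(backing);
        System.out.println(extractChars(full) == backing);   // true: backing array reused

        CharBuffer partial = CharBuffer.allocate(8);
        partial.put("ab");
        partial.flip();
        System.out.println(extractChars(partial).length);    // 2: copied into a new array
    }
}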
@@ -88,7 +99,7 @@ public class FileCharArray extends LazyCharArray {
     }
 
     @Override
-    protected long readChunkData(long fileOffset, CharBuffer dest) throws IOException {
+    protected char[] readChunkData(long fileOffset, long[] fileEndOffsetHolder) throws IOException {
         assert fChannel != null;
         final Charset charset = Charset.forName(fCharSet);
         final CharsetDecoder decoder = charset.newDecoder().onMalformedInput(CodingErrorAction.REPLACE)
@@ -96,29 +107,30 @@ public class FileCharArray extends LazyCharArray {
         int needBytes = (int) (CHUNK_SIZE * (double) decoder.averageCharsPerByte()); // avoid rounding errors.
         final ByteBuffer in = ByteBuffer.allocate(needBytes);
 
-        int total= 0;
+        final CharBuffer dest= CharBuffer.allocate(CHUNK_SIZE);
         boolean endOfInput= false;
-        while(total < CHUNK_SIZE && !endOfInput) {
+        while(dest.position() < CHUNK_SIZE && !endOfInput) {
             fChannel.position(fileOffset);
             in.clear();
             int count= fChannel.read(in);
             if (count == -1) {
-                return fileOffset;
+                break;
             }
             endOfInput= count < in.capacity();
-            total+= count;
             in.flip();
             decoder.decode(in, dest, endOfInput);
             fileOffset+= in.position();
         }
-        return fileOffset;
+        fileEndOffsetHolder[0]= fileOffset;
+        dest.flip();
+        return extractChars(dest);
     }
 
     @Override
-    protected void rereadChunkData(long fileOffset, long fileEndOffset, CharBuffer dest) {
+    protected void rereadChunkData(long fileOffset, long fileEndOffset, char[] dest) {
         FileInputStream fis;
         try {
             fis = new FileInputStream(fFileName);
@@ -128,7 +140,7 @@ public class FileCharArray extends LazyCharArray {
         }
         try {
             FileChannel channel = fis.getChannel();
-            decode(channel, fileOffset, fileEndOffset, dest);
+            decode(channel, fileOffset, fileEndOffset, CharBuffer.wrap(dest));
         } catch (IOException e) {
             // File cannot be read
         } finally {
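A side note on the reworked decode loop above: dest.position() grows with each decoder pass by the number of characters produced, so it measures how full the chunk is even for multi-byte charsets, where bytes read and characters decoded differ. A standalone sketch of that behavior, with an invented sample string and a 4-byte pass size (illustrative only, not part of this commit):

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CodingErrorAction;

// Feeds a CharsetDecoder in several small passes into one CharBuffer and
// prints how many characters have been decoded after each pass.
public class MultiPassDecodeDemo {
    public static void main(String[] args) {
        Charset utf8 = Charset.forName("UTF-8");
        byte[] bytes = "grüße aus Zürich".getBytes(utf8);        // contains multi-byte characters
        CharsetDecoder decoder = utf8.newDecoder()
                .onMalformedInput(CodingErrorAction.REPLACE)
                .onUnmappableCharacter(CodingErrorAction.REPLACE);

        CharBuffer dest = CharBuffer.allocate(64);
        int offset = 0;
        boolean endOfInput = false;
        while (!endOfInput) {
            int count = Math.min(4, bytes.length - offset);       // at most 4 bytes per pass
            ByteBuffer in = ByteBuffer.wrap(bytes, offset, count);
            endOfInput = offset + count == bytes.length;
            decoder.decode(in, dest, endOfInput);
            offset = in.position();                               // bytes actually consumed so far
            System.out.println("decoded so far: " + dest.position() + " chars");
        }
        dest.flip();
        System.out.println(dest);                                 // grüße aus Zürich
    }
}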

LazyCharArray.java

@@ -11,7 +11,6 @@
 package org.eclipse.cdt.internal.core.parser.scanner;
 
 import java.lang.ref.SoftReference;
-import java.nio.CharBuffer;
 import java.util.ArrayList;
 import java.util.List;
@@ -23,20 +22,6 @@ public abstract class LazyCharArray extends AbstractCharArray {
     private final static int CHUNK_BITS= 16; // 2^16 == 64K
     protected final static int CHUNK_SIZE= 1 << CHUNK_BITS;
 
-    /**
-     * Utility method to extract char[] out of CharBuffer.
-     */
-    protected static char[] extractChars(CharBuffer charBuffer) {
-        if (charBuffer.hasArray() && charBuffer.arrayOffset() == 0) {
-            char[] buf = charBuffer.array();
-            if (buf.length == charBuffer.remaining())
-                return buf;
-        }
-        char[] buf = new char[charBuffer.remaining()];
-        charBuffer.get(buf);
-        return buf;
-    }
-
     protected static class Chunk {
         final int fDataLength;
         final long fFileOffset;
@@ -77,7 +62,8 @@ public abstract class LazyCharArray extends AbstractCharArray {
         if (fLength >= 0)
             return offset < fLength;
 
-        return offset < fChunks.size() << CHUNK_BITS;
+        assert offset < fChunks.size() << CHUNK_BITS;
+        return true;
     }
 
     private void readUpTo(int offset) {
@@ -140,23 +126,22 @@ public abstract class LazyCharArray extends AbstractCharArray {
         final int chunkCount = fChunks.size();
         long fileOffset= chunkCount == 0 ? 0 : fChunks.get(chunkCount-1).fFileEndOffset;
         try {
-            CharBuffer dest= CharBuffer.allocate(CHUNK_SIZE);
             for (int i = chunkCount; i <= chunkOffset; i++) {
-                dest.clear();
-                long fileEndOffset= readChunkData(fileOffset, dest);
-                dest.flip();
-                final int charCount= dest.remaining();
+                long[] fileEndOffset= {0};
+                char[] data= readChunkData(fileOffset, fileEndOffset);
+                final int charCount= data.length;
                 if (charCount == 0) {
                     fLength= fChunks.size() * CHUNK_SIZE;
                     break;
                 }
                 // New chunk
-                Chunk chunk= new Chunk(fileOffset, fileEndOffset, extractChars(dest));
+                Chunk chunk= new Chunk(fileOffset, fileEndOffset[0], data);
                 fChunks.add(chunk);
                 if (charCount < CHUNK_SIZE) {
                     fLength= (fChunks.size()-1) * CHUNK_SIZE + charCount;
                     break;
                 }
+                fileOffset= fileEndOffset[0];
             }
         } catch (Exception e) {
             // File cannot be read
@@ -170,16 +155,8 @@ public abstract class LazyCharArray extends AbstractCharArray {
     }
 
     private char[] loadChunkData(Chunk chunk) {
-        CharBuffer dest= CharBuffer.allocate(chunk.fDataLength);
-        rereadChunkData(chunk.fFileOffset, chunk.fFileEndOffset, dest);
-        dest.flip();
-        char[] result= extractChars(dest);
-        if (result.length != chunk.fDataLength) {
-            // In case the file changed
-            char[] copy= new char[chunk.fDataLength];
-            System.arraycopy(result, 0, copy, 0, Math.min(result.length, copy.length));
-            result= copy;
-        }
+        char[] result= new char[chunk.fDataLength];
+        rereadChunkData(chunk.fFileOffset, chunk.fFileEndOffset, result);
         chunk.fData= new SoftReference<char[]>(result);
         return result;
     }
@@ -188,11 +165,11 @@ public abstract class LazyCharArray extends AbstractCharArray {
      * Read the chunk data at the given source offset and provide the end-offset in the
      * source.
      */
-    protected abstract long readChunkData(long sourceOffset, CharBuffer dest) throws Exception;
+    protected abstract char[] readChunkData(long sourceOffset, long[] sourceEndOffsetHolder) throws Exception;
 
     /**
      * Read the chunk data at the given source range. In case the source range no longer (fully) exists,
      * read as much as possible.
      */
-    protected abstract void rereadChunkData(long fileOffset, long fileEndOffset, CharBuffer dest);
+    protected abstract void rereadChunkData(long fileOffset, long fileEndOffset, char[] dest);
 }
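For readers implementing the new contract elsewhere, here is a minimal sketch of the two reworked abstract methods over an in-memory source. The class name and the backing array are invented for illustration and are not part of this commit; only the two method signatures mirror the diff.

// Sketch of the new readChunkData / rereadChunkData contract for an
// in-memory source. CHUNK_SIZE mirrors LazyCharArray.CHUNK_SIZE.
class InMemoryChunkReader {
    private static final int CHUNK_SIZE = 1 << 16;

    private final char[] fSource;

    InMemoryChunkReader(char[] source) {
        fSource = source;
    }

    // Read one chunk starting at sourceOffset; report where reading stopped
    // through the one-element holder and return the chunk's characters.
    char[] readChunkData(long sourceOffset, long[] sourceEndOffsetHolder) {
        int start = (int) Math.min(sourceOffset, fSource.length);
        int end = Math.min(fSource.length, start + CHUNK_SIZE);
        char[] data = new char[end - start];
        System.arraycopy(fSource, start, data, 0, data.length);
        sourceEndOffsetHolder[0] = end;
        return data;
    }

    // Re-read a previously seen range into a caller-allocated array; if the
    // source shrank in the meantime, copy only what is still available.
    void rereadChunkData(long fileOffset, long fileEndOffset, char[] dest) {
        int start = (int) Math.min(fileOffset, fSource.length);
        int end = (int) Math.min(fileEndOffset, fSource.length);
        System.arraycopy(fSource, start, dest, 0, Math.min(dest.length, end - start));
    }
}

readChunkData hands back both the chunk's characters and, through the holder, the offset at which the next chunk should start; rereadChunkData must tolerate a source that has changed since the chunk was first read, per the Javadoc above.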