
Merge changes I3668a396,I18f48321,I121f356c

* changes:
  Unpack and cache large deltas as loose objects
  Remember loose objects and fast-track their lookup
  Correctly name DeltaBaseCache
stable-0.9
Matthias Sohn, 14 years ago, committed by Code Review
commit 0e8ef77840
  1. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/CachedObjectDirectory.java (8 lines changed)
  2. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/DeltaBaseCache.java (195 lines changed)
  3. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/FileObjectDatabase.java (5 lines changed)
  4. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/LargePackedDeltaObject.java (52 lines changed)
  5. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectory.java (64 lines changed)
  6. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryInserter.java (50 lines changed)
  7. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/PackFile.java (14 lines changed)
  8. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/UnpackedObjectCache.java (200 lines changed)
  9. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/WindowCache.java (2 lines changed)
  10. org.eclipse.jgit/src/org/eclipse/jgit/storage/file/WindowCacheConfig.java (4 lines changed)

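Taken together, the first two changes work roughly like this: while a large packed delta is streamed to a caller, the inflated bytes are also deflated into a loose-object copy, and the object's id is then remembered in a small in-memory table so later reads can go straight to the loose file instead of re-applying the delta. The sketch below only illustrates that idea; every class, method, and variable name in it is invented for the example and is not JGit API.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashSet;
import java.util.Set;
import java.util.zip.DeflaterOutputStream;

/** Illustrative only: stream an object once, keep a deflated copy, remember its id. */
class LooseCopySketch {
	/** Stands in for the "recently unpacked" membership table. */
	static final Set<String> recentlyUnpacked = new HashSet<>();

	/** Copy every byte the caller reads into a deflated side stream, the way the
	 *  change tees the inflated delta into a loose-object file while streaming it. */
	static InputStream teeWhileReading(InputStream src, DeflaterOutputStream copy) {
		return new InputStream() {
			@Override
			public int read() throws IOException {
				int b = src.read();
				if (b >= 0)
					copy.write(b);
				return b;
			}

			@Override
			public void close() throws IOException {
				src.close();
				copy.close(); // finishes the deflated loose copy
			}
		};
	}

	public static void main(String[] args) throws IOException {
		byte[] content = "example object body".getBytes("UTF-8");
		ByteArrayOutputStream looseFile = new ByteArrayOutputStream();
		InputStream in = teeWhileReading(new ByteArrayInputStream(content),
				new DeflaterOutputStream(looseFile));
		while (in.read() >= 0) {
			// the caller consumes the stream exactly as before
		}
		in.close();

		String id = "exampleObjectId"; // placeholder for the real SHA-1
		recentlyUnpacked.add(id); // fast-track future lookups of this object
		System.out.println("loose copy bytes: " + looseFile.size());
		System.out.println("fast-track hit: " + recentlyUnpacked.contains(id));
	}
}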
org.eclipse.jgit/src/org/eclipse/jgit/storage/file/CachedObjectDirectory.java (8 lines changed)

@@ -54,7 +54,6 @@ import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectDatabase;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdSubclassMap;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.storage.pack.ObjectToPack;
import org.eclipse.jgit.storage.pack.PackWriter;
@@ -113,7 +112,7 @@ class CachedObjectDirectory extends FileObjectDatabase {
}
@Override
public ObjectInserter newInserter() {
public ObjectDirectoryInserter newInserter() {
return wrapped.newInserter();
}
@@ -213,6 +212,11 @@ class CachedObjectDirectory extends FileObjectDatabase {
throw new UnsupportedOperationException();
}
@Override
boolean insertUnpackedObject(File tmp, ObjectId objectId, boolean force) {
return wrapped.insertUnpackedObject(tmp, objectId, force);
}
@Override
void selectObjectRepresentation(PackWriter packer, ObjectToPack otp,
WindowCursor curs) throws IOException {

org.eclipse.jgit/src/org/eclipse/jgit/storage/file/DeltaBaseCache.java (195 lines changed)

@@ -0,0 +1,195 @@
/*
* Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.storage.file;
import java.lang.ref.SoftReference;
class DeltaBaseCache {
private static final int CACHE_SZ = 1024;
private static final SoftReference<Entry> DEAD;
private static int hash(final long position) {
return (((int) position) << 22) >>> 22;
}
private static int maxByteCount;
private static final Slot[] cache;
private static Slot lruHead;
private static Slot lruTail;
private static int openByteCount;
static {
DEAD = new SoftReference<Entry>(null);
maxByteCount = new WindowCacheConfig().getDeltaBaseCacheLimit();
cache = new Slot[CACHE_SZ];
for (int i = 0; i < CACHE_SZ; i++)
cache[i] = new Slot();
}
static synchronized void reconfigure(final WindowCacheConfig cfg) {
final int dbLimit = cfg.getDeltaBaseCacheLimit();
if (maxByteCount != dbLimit) {
maxByteCount = dbLimit;
releaseMemory();
}
}
static synchronized Entry get(final PackFile pack, final long position) {
final Slot e = cache[hash(position)];
if (e.provider == pack && e.position == position) {
final Entry buf = e.data.get();
if (buf != null) {
moveToHead(e);
return buf;
}
}
return null;
}
static synchronized void store(final PackFile pack, final long position,
final byte[] data, final int objectType) {
if (data.length > maxByteCount)
return; // Too large to cache.
final Slot e = cache[hash(position)];
clearEntry(e);
openByteCount += data.length;
releaseMemory();
e.provider = pack;
e.position = position;
e.sz = data.length;
e.data = new SoftReference<Entry>(new Entry(data, objectType));
moveToHead(e);
}
private static void releaseMemory() {
while (openByteCount > maxByteCount && lruTail != null) {
final Slot currOldest = lruTail;
final Slot nextOldest = currOldest.lruPrev;
clearEntry(currOldest);
currOldest.lruPrev = null;
currOldest.lruNext = null;
if (nextOldest == null)
lruHead = null;
else
nextOldest.lruNext = null;
lruTail = nextOldest;
}
}
static synchronized void purge(final PackFile file) {
for (final Slot e : cache) {
if (e.provider == file) {
clearEntry(e);
unlink(e);
}
}
}
private static void moveToHead(final Slot e) {
unlink(e);
e.lruPrev = null;
e.lruNext = lruHead;
if (lruHead != null)
lruHead.lruPrev = e;
else
lruTail = e;
lruHead = e;
}
private static void unlink(final Slot e) {
final Slot prev = e.lruPrev;
final Slot next = e.lruNext;
if (prev != null)
prev.lruNext = next;
if (next != null)
next.lruPrev = prev;
}
private static void clearEntry(final Slot e) {
openByteCount -= e.sz;
e.provider = null;
e.data = DEAD;
e.sz = 0;
}
private DeltaBaseCache() {
throw new UnsupportedOperationException();
}
static class Entry {
final byte[] data;
final int type;
Entry(final byte[] aData, final int aType) {
data = aData;
type = aType;
}
}
private static class Slot {
Slot lruPrev;
Slot lruNext;
PackFile provider;
long position;
int sz;
SoftReference<Entry> data = DEAD;
}
}

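A note on the slot selection in hash() above: CACHE_SZ is 1024 (2^10), and shifting the offset left by 22 bits and then unsigned-right by 22 keeps exactly its low 10 bits, so the slot index is simply the pack offset modulo 1024. The tiny class below is only a standalone check of that reading and is not part of the change.

/** Checks that hash(position) equals the low 10 bits of the pack offset. */
class DeltaBaseCacheHashCheck {
	static int hash(long position) {
		return (((int) position) << 22) >>> 22; // same expression as DeltaBaseCache
	}

	public static void main(String[] args) {
		long[] offsets = { 0, 12, 1023, 1024, 1025, 123456789L };
		for (long p : offsets) {
			int expected = (int) (p & 1023); // p % 1024 for non-negative offsets
			System.out.println(p + " -> " + hash(p) + " (expected " + expected + ")");
		}
	}
}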
org.eclipse.jgit/src/org/eclipse/jgit/storage/file/FileObjectDatabase.java (5 lines changed)

@@ -62,6 +62,9 @@ abstract class FileObjectDatabase extends ObjectDatabase {
return new WindowCursor(this);
}
@Override
public abstract ObjectDirectoryInserter newInserter();
/**
* Does the requested object exist in this database?
* <p>
@@ -246,6 +249,8 @@ abstract class FileObjectDatabase extends ObjectDatabase {
abstract long getObjectSize2(WindowCursor curs, String objectName,
AnyObjectId objectId) throws IOException;
abstract boolean insertUnpackedObject(File tmp, ObjectId id, boolean force);
abstract FileObjectDatabase newCachedFileObjectDatabase();
static class AlternateHandle {

org.eclipse.jgit/src/org/eclipse/jgit/storage/file/LargePackedDeltaObject.java (52 lines changed)

@@ -44,9 +44,12 @@
package org.eclipse.jgit.storage.file;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.DataFormatException;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
@@ -58,7 +61,6 @@ import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.storage.pack.BinaryDelta;
import org.eclipse.jgit.storage.pack.DeltaStream;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.io.TeeInputStream;
class LargePackedDeltaObject extends ObjectLoader {
@@ -165,14 +167,39 @@ class LargePackedDeltaObject extends ObjectLoader {
@Override
public ObjectStream openStream() throws MissingObjectException, IOException {
// If the object was recently unpacked, it's available loose.
// The loose format is going to be faster to access than a
// delta applied on top of a base. Use that whenever we can.
//
final ObjectId myId = getObjectId();
final WindowCursor wc = new WindowCursor(db);
ObjectLoader ldr = db.openObject2(wc, myId.name(), myId);
if (ldr != null)
return ldr.openStream();
InputStream in = open(wc);
in = new BufferedInputStream(in, 8192);
return new ObjectStream.Filter(getType(), size, in) {
// While we inflate the object, also deflate it back as a loose
// object. This will later be cleaned up by a gc pass, but until
// then we will reuse the loose form by the above code path.
//
int myType = getType();
long mySize = getSize();
final ObjectDirectoryInserter odi = db.newInserter();
final File tmp = odi.newTempFile();
DeflaterOutputStream dOut = odi.compress(new FileOutputStream(tmp));
odi.writeHeader(dOut, myType, mySize);
in = new TeeInputStream(in, dOut);
return new ObjectStream.Filter(myType, mySize, in) {
@Override
public void close() throws IOException {
wc.release();
super.close();
odi.release();
wc.release();
db.insertUnpackedObject(tmp, myId, true /* force creation */);
}
};
}
@@ -195,13 +222,9 @@ class LargePackedDeltaObject extends ObjectLoader {
final ObjectLoader base = pack.load(wc, baseOffset);
DeltaStream ds = new DeltaStream(delta) {
private long baseSize = SIZE_UNKNOWN;
private TemporaryBuffer.LocalFile buffer;
@Override
protected InputStream openBase() throws IOException {
if (buffer != null)
return buffer.openInputStream();
InputStream in;
if (base instanceof LargePackedDeltaObject)
in = ((LargePackedDeltaObject) base).open(wc);
@@ -213,9 +236,7 @@ class LargePackedDeltaObject extends ObjectLoader {
else if (in instanceof ObjectStream)
baseSize = ((ObjectStream) in).getSize();
}
buffer = new TemporaryBuffer.LocalFile(db.getDirectory());
return new TeeInputStream(in, buffer);
return in;
}
@Override
@@ -228,14 +249,11 @@ class LargePackedDeltaObject extends ObjectLoader {
}
return baseSize;
}
@Override
public void close() throws IOException {
super.close();
if (buffer != null)
buffer.destroy();
}
};
if (type == Constants.OBJ_BAD) {
if (!(base instanceof LargePackedDeltaObject))
type = base.getType();
}
if (size == SIZE_UNKNOWN)
size = ds.getSize();
return ds;

org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectory.java (64 lines changed)

@@ -69,7 +69,6 @@ import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectDatabase;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.RepositoryCache;
import org.eclipse.jgit.lib.RepositoryCache.FileKey;
@@ -117,6 +116,8 @@ public class ObjectDirectory extends FileObjectDatabase {
private final AtomicReference<AlternateHandle[]> alternates;
private final UnpackedObjectCache unpackedObjectCache;
/**
* Initialize a reference to an on-disk object directory.
*
@@ -140,6 +141,7 @@ public class ObjectDirectory extends FileObjectDatabase {
packDirectory = new File(objects, "pack");
alternatesFile = new File(infoDirectory, "alternates");
packList = new AtomicReference<PackList>(NO_PACKS);
unpackedObjectCache = new UnpackedObjectCache();
this.fs = fs;
alternates = new AtomicReference<AlternateHandle[]>();
@@ -173,12 +175,14 @@ public class ObjectDirectory extends FileObjectDatabase {
}
@Override
public ObjectInserter newInserter() {
public ObjectDirectoryInserter newInserter() {
return new ObjectDirectoryInserter(this, config);
}
@Override
public void close() {
unpackedObjectCache.clear();
final PackList packs = packList.get();
packList.set(NO_PACKS);
for (final PackFile p : packs.packs)
@@ -255,6 +259,8 @@ public class ObjectDirectory extends FileObjectDatabase {
}
boolean hasObject1(final AnyObjectId objectId) {
if (unpackedObjectCache.isUnpacked(objectId))
return true;
for (final PackFile p : packList.get().packs) {
try {
if (p.hasObject(objectId)) {
@@ -328,6 +334,14 @@ public class ObjectDirectory extends FileObjectDatabase {
ObjectLoader openObject1(final WindowCursor curs,
final AnyObjectId objectId) throws IOException {
if (unpackedObjectCache.isUnpacked(objectId)) {
ObjectLoader ldr = openObject2(curs, objectId.name(), objectId);
if (ldr != null)
return ldr;
else
unpackedObjectCache.remove(objectId);
}
PackList pList = packList.get();
SEARCH: for (;;) {
for (final PackFile p : pList.packs) {
@@ -429,15 +443,61 @@
File path = fileFor(objectName);
FileInputStream in = new FileInputStream(path);
try {
unpackedObjectCache.add(objectId);
return UnpackedObject.open(in, path, objectId, curs);
} finally {
in.close();
}
} catch (FileNotFoundException noFile) {
unpackedObjectCache.remove(objectId);
return null;
}
}
@Override
boolean insertUnpackedObject(File tmp, ObjectId id, boolean force) {
if (!force && has(id)) {
// Object is already in the repository, remove temporary file.
//
tmp.delete();
return true;
}
tmp.setReadOnly();
final File dst = fileFor(id);
if (force && dst.exists()) {
tmp.delete();
return true;
}
if (tmp.renameTo(dst)) {
unpackedObjectCache.add(id);
return true;
}
// Maybe the directory doesn't exist yet as the object
// directories are always lazily created. Note that we
// try the rename first as the directory likely does exist.
//
dst.getParentFile().mkdir();
if (tmp.renameTo(dst)) {
unpackedObjectCache.add(id);
return true;
}
if (!force && has(id)) {
tmp.delete();
return true;
}
// The object failed to be renamed into its proper
// location and it doesn't exist in the repository
// either. We really don't know what went wrong, so
// fail.
//
tmp.delete();
return false;
}
boolean tryAgain1() {
final PackList old = packList.get();
if (old.tryAgain(packDirectory.lastModified()))

org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryInserter.java (50 lines changed)

@@ -83,36 +83,10 @@ class ObjectDirectoryInserter extends ObjectInserter {
final MessageDigest md = digest();
final File tmp = toTemp(md, type, len, is);
final ObjectId id = ObjectId.fromRaw(md.digest());
if (db.has(id)) {
// Object is already in the repository, remove temporary file.
//
tmp.delete();
if (db.insertUnpackedObject(tmp, id, false /* no duplicate */))
return id;
}
final File dst = db.fileFor(id);
if (tmp.renameTo(dst))
return id;
// Maybe the directory doesn't exist yet as the object
// directories are always lazily created. Note that we
// try the rename first as the directory likely does exist.
//
dst.getParentFile().mkdir();
if (tmp.renameTo(dst))
return id;
if (db.has(id)) {
tmp.delete();
return id;
}
// The object failed to be renamed into its proper
// location and it doesn't exist in the repository
// either. We really don't know what went wrong, so
// fail.
//
tmp.delete();
throw new ObjectWritingException("Unable to create new object: " + dst);
}
@@ -136,15 +110,12 @@ class ObjectDirectoryInserter extends ObjectInserter {
final InputStream is) throws IOException, FileNotFoundException,
Error {
boolean delete = true;
File tmp = File.createTempFile("noz", null, db.getDirectory());
File tmp = newTempFile();
try {
DigestOutputStream dOut = new DigestOutputStream(
compress(new FileOutputStream(tmp)), md);
try {
dOut.write(Constants.encodedTypeString(type));
dOut.write((byte) ' ');
dOut.write(Constants.encodeASCII(len));
dOut.write((byte) 0);
writeHeader(dOut, type, len);
final byte[] buf = buffer();
while (len > 0) {
@@ -158,7 +129,6 @@ class ObjectDirectoryInserter extends ObjectInserter {
dOut.close();
}
tmp.setReadOnly();
delete = false;
return tmp;
} finally {
@@ -167,7 +137,19 @@ class ObjectDirectoryInserter extends ObjectInserter {
}
}
private DeflaterOutputStream compress(final OutputStream out) {
void writeHeader(OutputStream out, final int type, long len)
throws IOException {
out.write(Constants.encodedTypeString(type));
out.write((byte) ' ');
out.write(Constants.encodeASCII(len));
out.write((byte) 0);
}
File newTempFile() throws IOException {
return File.createTempFile("noz", null, db.getDirectory());
}
DeflaterOutputStream compress(final OutputStream out) {
if (deflate == null)
deflate = new Deflater(config.get(CoreConfig.KEY).getCompression());
else

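The writeHeader() method factored out above emits the standard loose-object header: the type name, a space, the decimal content length, and a NUL byte; the header and the content are deflated together, and the SHA-1 over the same header-plus-content is the object id. Below is a minimal standalone sketch of that framing, using plain JDK calls instead of the JGit Constants helpers.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

/** Illustrative loose-object framing: "<type> <length>\0" followed by the content. */
class LooseHeaderSketch {
	static void writeHeader(OutputStream out, String type, long len) throws IOException {
		out.write(type.getBytes("US-ASCII"));
		out.write(' ');
		out.write(Long.toString(len).getBytes("US-ASCII"));
		out.write(0);
	}

	public static void main(String[] args) throws IOException {
		ByteArrayOutputStream buf = new ByteArrayOutputStream();
		byte[] body = "hello\n".getBytes("US-ASCII");
		writeHeader(buf, "blob", body.length); // writes "blob 6" and a NUL
		buf.write(body);
		// The real inserter wraps this whole sequence in a DeflaterOutputStream
		// and feeds the uncompressed bytes to a SHA-1 digest to form the id.
		System.out.println("framed bytes: " + buf.size()); // 6 + 1 + 6 = 13
	}
}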
org.eclipse.jgit/src/org/eclipse/jgit/storage/file/PackFile.java (14 lines changed)

@@ -220,7 +220,7 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
* Close the resources utilized by this repository
*/
public void close() {
UnpackedObjectCache.purge(this);
DeltaBaseCache.purge(this);
WindowCache.purge(this);
synchronized (this) {
loadedIdx = null;
@@ -274,14 +274,6 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
return getReverseIdx().findObject(offset);
}
private final UnpackedObjectCache.Entry readCache(final long position) {
return UnpackedObjectCache.get(this, position);
}
private final void saveCache(final long position, final byte[] data, final int type) {
UnpackedObjectCache.store(this, position, data, type);
}
private final byte[] decompress(final long position, final long totalSize,
final WindowCursor curs) throws IOException, DataFormatException {
final byte[] dstbuf = new byte[(int) totalSize];
@@ -700,7 +692,7 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
byte[] data;
int type;
UnpackedObjectCache.Entry e = readCache(posBase);
DeltaBaseCache.Entry e = DeltaBaseCache.get(this, posBase);
if (e != null) {
data = e.data;
type = e.type;
@@ -715,7 +707,7 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
}
data = p.getCachedBytes();
type = p.getType();
saveCache(posBase, data, type);
DeltaBaseCache.store(this, posBase, data, type);
}
// At this point we have the base, and it's small, and the delta

org.eclipse.jgit/src/org/eclipse/jgit/storage/file/UnpackedObjectCache.java (200 lines changed)

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
* Copyright (C) 2010, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -43,153 +43,107 @@
package org.eclipse.jgit.storage.file;

import java.util.concurrent.atomic.AtomicReferenceArray;

import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;

(The old offset-keyed cache body was removed from this file; that code now lives in DeltaBaseCache.java, shown above. The rewritten class follows.)

/** Remembers objects that are currently unpacked. */
class UnpackedObjectCache {
	private static final int INITIAL_BITS = 5; // size = 32

	private static final int MAX_BITS = 11; // size = 2048

	private volatile Table table;

	UnpackedObjectCache() {
		table = new Table(INITIAL_BITS);
	}

	boolean isUnpacked(AnyObjectId objectId) {
		return table.contains(objectId);
	}

	void add(AnyObjectId objectId) {
		Table t = table;
		if (t.add(objectId)) {
			// The object either already exists in the table, or was
			// successfully added. Either way leave the table alone.
			//
		} else {
			// The object won't fit into the table. Implement a crude
			// cache removal by just dropping the table away, but double
			// it in size for the next incarnation.
			//
			Table n = new Table(Math.min(t.bits + 1, MAX_BITS));
			n.add(objectId);
			table = n;
		}
	}

	void remove(AnyObjectId objectId) {
		if (isUnpacked(objectId))
			clear();
	}

	void clear() {
		table = new Table(INITIAL_BITS);
	}

	private static class Table {
		private static final int MAX_CHAIN = 8;

		private final AtomicReferenceArray<ObjectId> ids;

		private final int shift;

		final int bits;

		Table(int bits) {
			this.ids = new AtomicReferenceArray<ObjectId>(1 << bits);
			this.shift = 32 - bits;
			this.bits = bits;
		}

		boolean contains(AnyObjectId toFind) {
			int i = index(toFind);
			for (int n = 0; n < MAX_CHAIN; n++) {
				ObjectId obj = ids.get(i);
				if (obj == null)
					break;

				if (AnyObjectId.equals(obj, toFind))
					return true;

				if (++i == ids.length())
					i = 0;
			}
			return false;
		}

		boolean add(AnyObjectId toAdd) {
			int i = index(toAdd);
			for (int n = 0; n < MAX_CHAIN;) {
				ObjectId obj = ids.get(i);
				if (obj == null) {
					if (ids.compareAndSet(i, null, toAdd.copy()))
						return true;
					else
						continue;
				}

				if (AnyObjectId.equals(obj, toAdd))
					return true;

				if (++i == ids.length())
					i = 0;
				n++;
			}
			return false;
		}

		private int index(AnyObjectId id) {
			return id.hashCode() >>> shift;
		}
	}
}

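The rewritten UnpackedObjectCache above is a membership hint rather than a byte cache: contains() and add() probe at most MAX_CHAIN consecutive slots, and when an insert cannot find room within that chain the whole table is dropped and rebuilt at up to double its size (MAX_BITS caps it at 2048 slots); remove() of a present id likewise just clears everything. The sketch below re-creates that probe-and-grow policy with plain Strings so it runs standalone; it is illustrative and not the JGit class itself.

import java.util.concurrent.atomic.AtomicReferenceArray;

/** Illustrative re-creation of the marker table's probe-and-grow policy. */
class MarkerTableSketch {
	static final int MAX_CHAIN = 8;

	final AtomicReferenceArray<String> slots;
	final int shift;
	final int bits;

	MarkerTableSketch(int bits) {
		this.slots = new AtomicReferenceArray<>(1 << bits);
		this.shift = 32 - bits;
		this.bits = bits;
	}

	boolean add(String key) {
		int i = key.hashCode() >>> shift;
		for (int n = 0; n < MAX_CHAIN;) {
			String cur = slots.get(i);
			if (cur == null) {
				if (slots.compareAndSet(i, null, key))
					return true;
				continue; // lost a race for this slot; re-read it
			}
			if (cur.equals(key))
				return true; // already recorded
			if (++i == slots.length())
				i = 0;
			n++;
		}
		return false; // probe chain full: caller would drop and double the table
	}

	public static void main(String[] args) {
		MarkerTableSketch t = new MarkerTableSketch(5); // 32 slots, like INITIAL_BITS
		int stored = 0;
		while (t.add("object-" + stored))
			stored++;
		// At this point add() returned false; the owner would switch to a new
		// table of min(bits + 1, MAX_BITS) bits and insert the id there.
		System.out.println("stored " + stored + " ids before the table had to grow");
	}
}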
org.eclipse.jgit/src/org/eclipse/jgit/storage/file/WindowCache.java (2 lines changed)

@@ -187,7 +187,7 @@ public class WindowCache {
oc.removeAll();
cache = nc;
streamFileThreshold = cfg.getStreamFileThreshold();
UnpackedObjectCache.reconfigure(cfg);
DeltaBaseCache.reconfigure(cfg);
}
static int getStreamFileThreshold() {

org.eclipse.jgit/src/org/eclipse/jgit/storage/file/WindowCacheConfig.java (4 lines changed)

@@ -146,7 +146,7 @@ public class WindowCacheConfig {
}
/**
* @return maximum number of bytes to cache in {@link UnpackedObjectCache}
* @return maximum number of bytes to cache in {@link DeltaBaseCache}
* for inflated, recently accessed objects, without delta chains.
* <b>Default 10 MB.</b>
*/
@@ -157,7 +157,7 @@
/**
* @param newLimit
* maximum number of bytes to cache in
* {@link UnpackedObjectCache} for inflated, recently accessed
* {@link DeltaBaseCache} for inflated, recently accessed
* objects, without delta chains.
*/
public void setDeltaBaseCacheLimit(final int newLimit) {
