DHT: Use a proper HashMap for RecentChunk lookups

A linear search is somewhat acceptable for only 4 recent chunks, but
a HashMap-based lookup is better. The table has 16 slots by default,
and since the hashCode() of ChunkKey is derived from the SHA-1 of the
chunk, each chunk will typically fall into its own bucket within the
table, so a lookup evaluates only 1 entry instead of 4.

Some users may also want to devote more memory to the recent chunks,
in which case a longer list helps reduce chunk faults, but with a
linear scan it also increases search time. A HashMap keeps lookups
constant-time, so this code scales better to larger cache sizes.
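
For illustration, enlarging the cache might look like the sketch below.
This is a hedged example: setRecentChunkCacheSize(int) is assumed to
mirror the getRecentChunkCacheSize() getter used in the diff and may not
match the real DhtReaderOptions API exactly.

    // Assumed API, inferred from reader.getOptions().getRecentChunkCacheSize().
    DhtReaderOptions opts = new DhtReaderOptions();
    opts.setRecentChunkCacheSize(64); // keep 64 chunks; lookups remain O(1)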

Change-Id: Ia41b7a1cc69ad27b85749e3b74cbf8d0aa338044
Signed-off-by: Shawn O. Pearce <spearce@spearce.org>
stable-1.1 · Shawn O. Pearce · commit 1e6b02643c
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java | 18 lines changed

@@ -44,6 +44,7 @@
 package org.eclipse.jgit.storage.dht;
 
 import java.io.IOException;
+import java.util.HashMap;
 
 import org.eclipse.jgit.lib.AnyObjectId;
 import org.eclipse.jgit.lib.ObjectLoader;
@@ -57,6 +58,8 @@ final class RecentChunks {
 	private final int maxSize;
 
+	private final HashMap<ChunkKey, Node> byKey;
+
 	private int curSize;
 
 	private Node lruHead;
@@ -67,36 +70,36 @@ final class RecentChunks {
 		this.reader = reader;
 		this.stats = reader.getStatistics();
 		this.maxSize = reader.getOptions().getRecentChunkCacheSize();
+		this.byKey = new HashMap<ChunkKey, Node>();
 	}
 
 	PackChunk get(ChunkKey key) {
-		for (Node n = lruHead; n != null; n = n.next) {
-			if (key.equals(n.chunk.getChunkKey())) {
-				hit(n);
-				stats.recentChunks_Hits++;
-				return n.chunk;
-			}
-		}
+		Node n = byKey.get(key);
+		if (n != null) {
+			hit(n);
+			stats.recentChunks_Hits++;
+			return n.chunk;
+		}
 		stats.recentChunks_Miss++;
 		return null;
 	}
 
 	void put(PackChunk chunk) {
-		for (Node n = lruHead; n != null; n = n.next) {
-			if (n.chunk == chunk) {
-				hit(n);
-				return;
-			}
-		}
+		Node n = byKey.get(chunk.getChunkKey());
+		if (n != null && n.chunk == chunk) {
+			hit(n);
+			return;
+		}
 
-		Node n;
 		if (curSize < maxSize) {
 			n = new Node();
 			curSize++;
 		} else {
 			n = lruTail;
+			byKey.remove(n.chunk.getChunkKey());
 		}
 
 		n.chunk = chunk;
+		byKey.put(chunk.getChunkKey(), n);
 		hit(n);
 	}
@@ -167,6 +170,7 @@ final class RecentChunks {
 		curSize = 0;
 		lruHead = null;
 		lruTail = null;
+		byKey.clear();
 	}
 
 	private void hit(Node n) {
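
To make the pattern above concrete, here is a minimal, self-contained
sketch of the same design: a HashMap index over an intrusive doubly
linked LRU list. Names are illustrative, not the JGit class itself, and
it assumes maxSize >= 1.

    import java.util.HashMap;

    // Minimal LRU cache mirroring the RecentChunks pattern above.
    final class LruSketch<K, V> {
        private final class Node {
            K key;
            V value;
            Node prev, next;
        }

        private final int maxSize;
        private final HashMap<K, Node> byKey = new HashMap<>();
        private Node lruHead, lruTail;
        private int curSize;

        LruSketch(int maxSize) {
            this.maxSize = maxSize; // assumes maxSize >= 1
        }

        // O(1) lookup: the HashMap replaces a linear walk of the list.
        V get(K key) {
            Node n = byKey.get(key);
            if (n == null)
                return null;
            hit(n);
            return n.value;
        }

        void put(K key, V value) {
            Node n = byKey.get(key);
            if (n == null) {
                if (curSize < maxSize) {
                    n = new Node();
                    curSize++;
                } else {
                    n = lruTail; // recycle the least recently used node
                    unlink(n);
                    byKey.remove(n.key);
                }
                n.key = key;
                byKey.put(key, n);
            } else {
                unlink(n);
            }
            n.value = value;
            linkAtHead(n);
        }

        // Move an accessed node to the front of the recency list.
        private void hit(Node n) {
            if (n != lruHead) {
                unlink(n);
                linkAtHead(n);
            }
        }

        private void unlink(Node n) {
            if (n.prev != null) n.prev.next = n.next; else lruHead = n.next;
            if (n.next != null) n.next.prev = n.prev; else lruTail = n.prev;
            n.prev = null;
            n.next = null;
        }

        private void linkAtHead(Node n) {
            n.next = lruHead;
            n.prev = null;
            if (lruHead != null)
                lruHead.prev = n;
            lruHead = n;
            if (lruTail == null)
                lruTail = n;
        }
    }

Insertion, promotion, and eviction are all O(1) with the list alone; the
HashMap is what makes the membership test O(1) as well, which is the
whole point of this change.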
