
Merge branch 'master' into stable-4.8

* master:
  Fix out-of-bounds exception in RepoCommand#relative
  Fix null return from FS.readPipe when command fails to launch
  RenameDetector: Clarify rename limits <= 0
  Remove unnecessary cast for DfsReader
  Allow DfsReader to be subclassed
  Track read IO for DfsReader
  Fix javadoc of TooLargeObjectInPackException
  Exclude refs/tags from bitmap commit selection

Change-Id: I9cd20ded108d2e5d81fa1f0c2cb9aa0eabe1f256
Branch: stable-4.8
Author: Matthias Sohn, 8 years ago
Commit: 7e1a11f292
22 files changed (lines changed in parentheses):

 1. org.eclipse.jgit.test/tst/org/eclipse/jgit/gitrepo/RepoCommandTest.java (1)
 2. org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java (2)
 3. org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java (2)
 4. org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java (16)
 5. org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java (15)
 6. org.eclipse.jgit/src/org/eclipse/jgit/api/errors/TooLargeObjectInPackException.java (3)
 7. org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java (4)
 8. org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java (31)
 9. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java (2)
10. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java (41)
11. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java (4)
12. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java (2)
13. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java (2)
14. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java (22)
15. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java (33)
16. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java (139)
17. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java (2)
18. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java (45)
19. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java (96)
20. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java (18)
21. org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java (2)
22. org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java (8)

org.eclipse.jgit.test/tst/org/eclipse/jgit/gitrepo/RepoCommandTest.java (1)

@@ -1120,6 +1120,7 @@ public class RepoCommandTest extends RepositoryTestCase {
 		testRelative("a/", "a/b", "b");
 		testRelative("/a/b/c", "/b/c", "../../b/c");
 		testRelative("/abc", "bcd", "bcd");
+		testRelative("abc", "def", "def");
 		testRelative("abc", "/bcd", "/bcd");
 		testRelative("http://a", "a/b", "a/b");
 		testRelative("http://base.com/a/", "http://child.com/a/b", "http://child.com/a/b");
org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java (2)

@@ -674,7 +674,7 @@ public class DfsGarbageCollectorTest {
 	private boolean isObjectInPack(AnyObjectId id, DfsPackFile pack)
 			throws IOException {
-		try (DfsReader reader = new DfsReader(odb)) {
+		try (DfsReader reader = odb.newReader()) {
 			return pack.hasObject(reader, id);
 		}
 	}

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java (2)

@@ -711,7 +711,7 @@ public class PackWriterTest extends SampleDataRepositoryTestCase {
 		}
 		ObjectWalk ow = walk.toObjectWalkWithSameObjects();
-		pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have);
+		pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have, NONE);
 		String id = pw.computeName().getName();
 		File packdir = new File(repo.getObjectsDirectory(), "pack");
 		File packFile = new File(packdir, "pack-" + id + ".pack");

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java (16)

@@ -69,6 +69,15 @@ public class GcCommitSelectionTest extends GcTestCase {
 	@Test
 	public void testBitmapSpansNoMerges() throws Exception {
+		testBitmapSpansNoMerges(false);
+	}
+
+	@Test
+	public void testBitmapSpansNoMergesWithTags() throws Exception {
+		testBitmapSpansNoMerges(true);
+	}
+
+	private void testBitmapSpansNoMerges(boolean withTags) throws Exception {
 		/*
 		 * Commit counts -> expected bitmap counts for history without merges.
 		 * The top 100 contiguous commits should always have bitmaps, and the

@@ -89,7 +98,10 @@ public class GcCommitSelectionTest extends GcTestCase {
 			assertTrue(nextCommitCount > currentCommits); // programming error
 			for (int i = currentCommits; i < nextCommitCount; i++) {
 				String str = "A" + i;
-				bb.commit().message(str).add(str, str).create();
+				RevCommit rc = bb.commit().message(str).add(str, str).create();
+				if (withTags) {
+					tr.lightweightTag(str, rc);
+				}
 			}
 			currentCommits = nextCommitCount;

@@ -233,7 +245,7 @@ public class GcCommitSelectionTest extends GcTestCase {
 				m8, m9);
 		PackWriterBitmapPreparer preparer = newPeparer(m9, commits);
 		List<BitmapCommit> selection = new ArrayList<>(
-				preparer.selectCommits(commits.size()));
+				preparer.selectCommits(commits.size(), PackWriter.NONE));
 		// Verify that the output is ordered by the separate "chains"
 		String[] expected = { m0.name(), m1.name(), m2.name(), m4.name(),

org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java (15)

@@ -172,9 +172,18 @@ public class FSTest {
 		FS fs = FS.DETECTED.newInstance();
 		assumeTrue(fs instanceof FS_POSIX);
-		String r = FS.readPipe(fs.userHome(),
-				new String[] { "bash", "--login", "-c", "foobar" },
+		FS.readPipe(fs.userHome(),
+				new String[] { "/bin/sh", "-c", "exit 1" },
 				Charset.defaultCharset().name());
-		System.out.println(r);
+	}
+
+	@Test(expected = CommandFailedException.class)
+	public void testReadPipeCommandStartFailure()
+			throws CommandFailedException {
+		FS fs = FS.DETECTED.newInstance();
+		FS.readPipe(fs.userHome(),
+				new String[] { "this-command-does-not-exist" },
+				Charset.defaultCharset().name());
 	}
 }

org.eclipse.jgit/src/org/eclipse/jgit/api/errors/TooLargeObjectInPackException.java (3)

@@ -38,7 +38,8 @@
 package org.eclipse.jgit.api.errors;

 /**
- * Exception thrown when the server rejected a too large pack
+ * Exception thrown when PackParser finds an object larger than a predefined
+ * limit
  *
  * @since 4.4
  */

org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java (4)

@@ -220,7 +220,9 @@ public class RenameDetector {
 	 * must be allocated, and 1,000,000 file compares may need to be performed.
 	 *
 	 * @param limit
-	 *            new file limit.
+	 *            new file limit. 0 means no limit; a negative number means no
+	 *            inexact rename detection will be performed, only exact rename
+	 *            detection.
 	 */
 	public void setRenameLimit(int limit) {
 		renameLimit = limit;

org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java (31)

@@ -731,7 +731,9 @@ public class RepoCommand extends GitCommand<RevCommit> {
 	 * Returns the child if either base or child is not a bare path. This provides a missing feature in
 	 * java.net.URI (see http://bugs.java.com/view_bug.do?bug_id=6226081).
 	 */
+	private static final String SLASH = "/"; //$NON-NLS-1$
+
 	static URI relativize(URI current, URI target) {
 		// We only handle bare paths for now.
 		if (!target.toString().equals(target.getPath())) {
 			return target;

@@ -744,37 +746,46 @@ public class RepoCommand extends GitCommand<RevCommit> {
 		String dest = target.normalize().getPath();
 		// TODO(hanwen): maybe (absolute, relative) should throw an exception.
-		if (cur.startsWith("/") != dest.startsWith("/")) { //$NON-NLS-1$//$NON-NLS-2$
+		if (cur.startsWith(SLASH) != dest.startsWith(SLASH)) {
 			return target;
 		}
-		while (cur.startsWith("/")) { //$NON-NLS-1$
+		while (cur.startsWith(SLASH)) {
 			cur = cur.substring(1);
 		}
-		while (dest.startsWith("/")) { //$NON-NLS-1$
+		while (dest.startsWith(SLASH)) {
 			dest = dest.substring(1);
 		}
-		if (!cur.endsWith("/")) { //$NON-NLS-1$
+		if (cur.indexOf('/') == -1 || dest.indexOf('/') == -1) {
+			// Avoid having to special-casing in the next two ifs.
+			String prefix = "prefix/"; //$NON-NLS-1$
+			cur = prefix + cur;
+			dest = prefix + dest;
+		}
+
+		if (!cur.endsWith(SLASH)) {
 			// The current file doesn't matter.
-			cur = cur.substring(0, cur.lastIndexOf('/'));
+			int lastSlash = cur.lastIndexOf('/');
+			cur = cur.substring(0, lastSlash);
 		}
 		String destFile = ""; //$NON-NLS-1$
-		if (!dest.endsWith("/")) { //$NON-NLS-1$
+		if (!dest.endsWith(SLASH)) {
 			// We always have to provide the destination file.
-			destFile = dest.substring(dest.lastIndexOf('/') + 1, dest.length());
+			int lastSlash = dest.lastIndexOf('/');
+			destFile = dest.substring(lastSlash + 1, dest.length());
 			dest = dest.substring(0, dest.lastIndexOf('/'));
 		}
-		String[] cs = cur.split("/"); //$NON-NLS-1$
-		String[] ds = dest.split("/"); //$NON-NLS-1$
+		String[] cs = cur.split(SLASH);
+		String[] ds = dest.split(SLASH);
 		int common = 0;
 		while (common < cs.length && common < ds.length && cs[common].equals(ds[common])) {
 			common++;
 		}
-		StringJoiner j = new StringJoiner("/"); //$NON-NLS-1$
+		StringJoiner j = new StringJoiner(SLASH);
 		for (int i = common; i < cs.length; i++) {
 			j.add(".."); //$NON-NLS-1$
 		}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java (2)

@@ -322,6 +322,7 @@ public final class DfsBlockCache {
 		HashEntry e1 = table.get(slot);
 		DfsBlock v = scan(e1, key, position);
 		if (v != null) {
+			ctx.stats.blockCacheHit++;
 			statHit.incrementAndGet();
 			return v;
 		}

@@ -334,6 +335,7 @@ public final class DfsBlockCache {
 			if (e2 != e1) {
 				v = scan(e2, key, position);
 				if (v != null) {
+					ctx.stats.blockCacheHit++;
 					statHit.incrementAndGet();
 					creditSpace(blockSize);
 					return v;

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java (41)

@@ -53,6 +53,7 @@ import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.UN
 import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
 import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
 import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+import static org.eclipse.jgit.internal.storage.pack.PackWriter.NONE;

 import java.io.IOException;
 import java.util.ArrayList;

@@ -111,7 +112,8 @@ public class DfsGarbageCollector {
 	private List<DfsPackFile> packsBefore;
 	private List<DfsPackFile> expiredGarbagePacks;

-	private Set<ObjectId> allHeads;
+	private Set<ObjectId> allHeadsAndTags;
+	private Set<ObjectId> allTags;
 	private Set<ObjectId> nonHeads;
 	private Set<ObjectId> txnHeads;
 	private Set<ObjectId> tagTargets;

@@ -233,7 +235,7 @@ public class DfsGarbageCollector {
 					JGitText.get().supportOnlyPackIndexVersion2);
 		startTimeMillis = SystemReader.getInstance().getCurrentTime();
-		ctx = (DfsReader) objdb.newReader();
+		ctx = objdb.newReader();
 		try {
 			refdb.refresh();
 			objdb.clearCache();

@@ -241,23 +243,36 @@ public class DfsGarbageCollector {
 			Collection<Ref> refsBefore = getAllRefs();
 			readPacksBefore();

-			allHeads = new HashSet<>();
+			Set<ObjectId> allHeads = new HashSet<>();
+			allHeadsAndTags = new HashSet<>();
+			allTags = new HashSet<>();
 			nonHeads = new HashSet<>();
 			txnHeads = new HashSet<>();
 			tagTargets = new HashSet<>();
 			for (Ref ref : refsBefore) {
-				if (ref.isSymbolic() || ref.getObjectId() == null)
+				if (ref.isSymbolic() || ref.getObjectId() == null) {
 					continue;
-				if (isHead(ref) || isTag(ref))
+				}
+				if (isHead(ref)) {
 					allHeads.add(ref.getObjectId());
-				else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+				} else if (isTag(ref)) {
+					allTags.add(ref.getObjectId());
+				} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
 					txnHeads.add(ref.getObjectId());
-				else
+				} else {
 					nonHeads.add(ref.getObjectId());
-				if (ref.getPeeledObjectId() != null)
+				}
+				if (ref.getPeeledObjectId() != null) {
 					tagTargets.add(ref.getPeeledObjectId());
+				}
 			}
-			tagTargets.addAll(allHeads);
+			// Don't exclude tags that are also branch tips.
+			allTags.removeAll(allHeads);
+			allHeadsAndTags.addAll(allHeads);
+			allHeadsAndTags.addAll(allTags);
+
+			// Hoist all branch tips and tags earlier in the pack file
+			tagTargets.addAll(allHeadsAndTags);

 			boolean rollback = true;
 			try {

@@ -413,12 +428,12 @@ public class DfsGarbageCollector {
 	}

 	private void packHeads(ProgressMonitor pm) throws IOException {
-		if (allHeads.isEmpty())
+		if (allHeadsAndTags.isEmpty())
 			return;

 		try (PackWriter pw = newPackWriter()) {
 			pw.setTagTargets(tagTargets);
-			pw.preparePack(pm, allHeads, PackWriter.NONE);
+			pw.preparePack(pm, allHeadsAndTags, NONE, NONE, allTags);
 			if (0 < pw.getObjectCount())
 				writePack(GC, pw, pm,
 						estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC));

@@ -432,7 +447,7 @@ public class DfsGarbageCollector {
 		try (PackWriter pw = newPackWriter()) {
 			for (ObjectIdSet packedObjs : newPackObj)
 				pw.excludeObjects(packedObjs);
-			pw.preparePack(pm, nonHeads, allHeads);
+			pw.preparePack(pm, nonHeads, allHeadsAndTags);
 			if (0 < pw.getObjectCount())
 				writePack(GC_REST, pw, pm,
 						estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC_REST));

@@ -446,7 +461,7 @@ public class DfsGarbageCollector {
 		try (PackWriter pw = newPackWriter()) {
 			for (ObjectIdSet packedObjs : newPackObj)
 				pw.excludeObjects(packedObjs);
-			pw.preparePack(pm, txnHeads, PackWriter.NONE);
+			pw.preparePack(pm, txnHeads, NONE);
 			if (0 < pw.getObjectCount())
 				writePack(GC_TXN, pw, pm, 0 /* unknown pack size */);
 		}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java (4)

@@ -530,7 +530,7 @@ public class DfsInserter extends ObjectInserter {
 	}

 	private class Reader extends ObjectReader {
-		private final DfsReader ctx = new DfsReader(db);
+		private final DfsReader ctx = db.newReader();

 		@Override
 		public ObjectReader newReader() {

@@ -647,7 +647,7 @@ public class DfsInserter extends ObjectInserter {
 		@Override
 		public ObjectStream openStream() throws IOException {
-			final DfsReader ctx = new DfsReader(db);
+			final DfsReader ctx = db.newReader();
 			if (srcPack != packKey) {
 				try {
 					// Post DfsInserter.flush() use the normal code path.

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java (2)

@@ -170,7 +170,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
 	}

 	@Override
-	public ObjectReader newReader() {
+	public DfsReader newReader() {
 		return new DfsReader(this);
 	}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java (2)

@@ -201,7 +201,7 @@ public class DfsPackCompactor {
 			pm = NullProgressMonitor.INSTANCE;

 		DfsObjDatabase objdb = repo.getObjectDatabase();
-		try (DfsReader ctx = (DfsReader) objdb.newReader()) {
+		try (DfsReader ctx = objdb.newReader()) {
 			PackConfig pc = new PackConfig(repo);
 			pc.setIndexVersion(2);
 			pc.setDeltaCompress(false);

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java (22)

@@ -251,6 +251,8 @@ public final class DfsPackFile {
 		PackIndex idx;
 		try {
+			ctx.stats.readIdx++;
+			long start = System.nanoTime();
 			ReadableChannel rc = ctx.db.openFile(packDesc, INDEX);
 			try {
 				InputStream in = Channels.newInputStream(rc);

@@ -260,10 +262,11 @@ public final class DfsPackFile {
 					bs = (wantSize / bs) * bs;
 				else if (bs <= 0)
 					bs = wantSize;
-				in = new BufferedInputStream(in, bs);
-				idx = PackIndex.read(in);
+				idx = PackIndex.read(new BufferedInputStream(in, bs));
+				ctx.stats.readIdxBytes += rc.position();
 			} finally {
 				rc.close();
+				ctx.stats.readIdxMicros += elapsedMicros(start);
 			}
 		} catch (EOFException e) {
 			invalid = true;

@@ -286,6 +289,10 @@ public final class DfsPackFile {
 		}
 	}

+	private static long elapsedMicros(long start) {
+		return (System.nanoTime() - start) / 1000L;
+	}
+
 	final boolean isGarbage() {
 		return packDesc.getPackSource() == UNREACHABLE_GARBAGE;
 	}

@@ -314,6 +321,8 @@ public final class DfsPackFile {
 		long size;
 		PackBitmapIndex idx;
 		try {
+			ctx.stats.readBitmap++;
+			long start = System.nanoTime();
 			ReadableChannel rc = ctx.db.openFile(packDesc, BITMAP_INDEX);
 			try {
 				InputStream in = Channels.newInputStream(rc);

@@ -329,6 +338,8 @@ public final class DfsPackFile {
 			} finally {
 				size = rc.position();
 				rc.close();
+				ctx.stats.readIdxBytes += size;
+				ctx.stats.readIdxMicros += elapsedMicros(start);
 			}
 		} catch (EOFException e) {
 			IOException e2 = new IOException(MessageFormat.format(

@@ -777,6 +788,8 @@ public final class DfsPackFile {
 		if (invalid)
 			throw new PackInvalidException(getPackName());

+		ctx.stats.readBlock++;
+		long start = System.nanoTime();
 		ReadableChannel rc = ctx.db.openFile(packDesc, PACK);
 		try {
 			int size = blockSize(rc);

@@ -803,6 +816,7 @@ public final class DfsPackFile {
 			byte[] buf = new byte[size];
 			rc.position(pos);
 			int cnt = read(rc, ByteBuffer.wrap(buf, 0, size));
+			ctx.stats.readBlockBytes += cnt;
 			if (cnt != size) {
 				if (0 <= len) {
 					throw new EOFException(MessageFormat.format(

@@ -824,10 +838,10 @@ public final class DfsPackFile {
 				length = len = rc.size();
 			}

-			DfsBlock v = new DfsBlock(key, pos, buf);
-			return v;
+			return new DfsBlock(key, pos, buf);
 		} finally {
 			rc.close();
+			ctx.stats.readBlockMicros += elapsedMicros(start);
 		}
 	}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java (33)

@@ -95,7 +95,7 @@ import org.eclipse.jgit.util.BlockList;
 * See the base {@link ObjectReader} documentation for details. Notably, a
 * reader is not thread safe.
 */
-public final class DfsReader extends ObjectReader implements ObjectReuseAsIs {
+public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
	private static final int MAX_RESOLVE_MATCHES = 256;

	/** Temporary buffer large enough for at least one raw object id. */

@@ -104,17 +104,21 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 	/** Database this reader loads objects from. */
 	final DfsObjDatabase db;

-	private Inflater inf;
+	final DfsReaderIoStats.Accumulator stats = new DfsReaderIoStats.Accumulator();

+	private Inflater inf;
 	private DfsBlock block;
 	private DeltaBaseCache baseCache;
 	private DfsPackFile last;
 	private boolean avoidUnreachable;

-	DfsReader(DfsObjDatabase db) {
+	/**
+	 * Initialize a new DfsReader
+	 *
+	 * @param db
+	 *            parent DfsObjDatabase.
+	 */
+	protected DfsReader(DfsObjDatabase db) {
 		this.db = db;
 		this.streamFileThreshold = db.getReaderOptions().getStreamFileThreshold();
 	}

@@ -131,7 +135,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 	@Override
 	public ObjectReader newReader() {
-		return new DfsReader(db);
+		return db.newReader();
 	}

@@ -170,6 +174,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 		PackList packList = db.getPackList();
 		resolveImpl(packList, id, matches);
 		if (matches.size() < MAX_RESOLVE_MATCHES && packList.dirty()) {
+			stats.scanPacks++;
 			resolveImpl(db.scanPacks(packList), id, matches);
 		}
 		return matches;

@@ -198,6 +203,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 		if (hasImpl(packList, objectId)) {
 			return true;
 		} else if (packList.dirty()) {
+			stats.scanPacks++;
 			return hasImpl(db.scanPacks(packList), objectId);
 		}
 		return false;

@@ -234,6 +240,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 			return checkType(ldr, objectId, typeHint);
 		}
 		if (packList.dirty()) {
+			stats.scanPacks++;
 			ldr = openImpl(db.scanPacks(packList), objectId);
 			if (ldr != null) {
 				return checkType(ldr, objectId, typeHint);

@@ -316,6 +323,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 		List<FoundObject<T>> r = new ArrayList<>();
 		findAllImpl(packList, pending, r);
 		if (!pending.isEmpty() && packList.dirty()) {
+			stats.scanPacks++;
 			findAllImpl(db.scanPacks(packList), pending, r);
 		}
 		for (T t : pending) {

@@ -452,7 +460,6 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 		final IOException findAllError = error;
 		return new AsyncObjectSizeQueue<T>() {
 			private FoundObject<T> cur;
-
 			private long sz;

 			@Override

@@ -718,9 +725,10 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 		for (int dstoff = 0;;) {
 			int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
 			dstoff += n;
-			if (inf.finished() || (headerOnly && dstoff == dstbuf.length))
+			if (inf.finished() || (headerOnly && dstoff == dstbuf.length)) {
+				stats.inflatedBytes += dstoff;
 				return dstoff;
-			if (inf.needsInput()) {
+			} else if (inf.needsInput()) {
 				pin(pack, position);
 				position += block.setInput(position, inf);
 			} else if (n == 0)

@@ -764,6 +772,11 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 		block = null;
 	}

+	/** @return IO statistics accumulated by this reader. */
+	public DfsReaderIoStats getIoStats() {
+		return new DfsReaderIoStats(stats);
+	}
+
 	/** Release the current window cursor. */
 	@Override
 	public void close() {
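
Note: together with the covariant DfsObjDatabase.newReader() above, dropping final and making the constructor protected is what allows DfsReader to be subclassed. A minimal sketch of what this enables; LoggingDfsReader is hypothetical and not part of this change, and a real DFS backend would return it from an overridden newReader():

import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase;
import org.eclipse.jgit.internal.storage.dfs.DfsReader;
import org.eclipse.jgit.internal.storage.dfs.DfsReaderIoStats;

// Hypothetical subclass used for illustration only.
class LoggingDfsReader extends DfsReader {
	LoggingDfsReader(DfsObjDatabase db) {
		super(db); // constructor is protected as of this merge
	}

	@Override
	public void close() {
		DfsReaderIoStats io = getIoStats();
		System.err.println("reader closed: " + io.getReadBlocksCount()
				+ " blocks read, " + io.getBlockCacheHits() + " cache hits");
		super.close();
	}
}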

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java (139, new file)

@@ -0,0 +1,139 @@
/*
* Copyright (C) 2017, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.internal.storage.dfs;
/** IO statistics for a {@link DfsReader}. */
public class DfsReaderIoStats {
/** POJO to accumulate IO statistics. */
public static class Accumulator {
/** Number of times the reader explicitly called scanPacks. */
long scanPacks;
/** Total number of complete pack indexes read into memory. */
long readIdx;
/** Total number of complete bitmap indexes read into memory. */
long readBitmap;
/** Total number of bytes read from indexes. */
long readIdxBytes;
/** Total microseconds spent reading pack or bitmap indexes. */
long readIdxMicros;
/** Total number of block cache hits. */
long blockCacheHit;
/** Total number of discrete blocks read from pack file(s). */
long readBlock;
/** Total number of compressed bytes read as block sized units. */
long readBlockBytes;
/** Total microseconds spent reading {@link #readBlock} blocks. */
long readBlockMicros;
/** Total number of bytes decompressed. */
long inflatedBytes;
Accumulator() {
}
}
private final Accumulator stats;
DfsReaderIoStats(Accumulator stats) {
this.stats = stats;
}
/** @return number of times the reader explicitly called scanPacks. */
public long getScanPacks() {
return stats.scanPacks;
}
/** @return total number of complete pack indexes read into memory. */
public long getReadPackIndexCount() {
return stats.readIdx;
}
/** @return total number of complete bitmap indexes read into memory. */
public long getReadBitmapIndexCount() {
return stats.readBitmap;
}
/** @return total number of bytes read from indexes. */
public long getReadIndexBytes() {
return stats.readIdxBytes;
}
/** @return total microseconds spent reading pack or bitmap indexes. */
public long getReadIndexMicros() {
return stats.readIdxMicros;
}
/** @return total number of block cache hits. */
public long getBlockCacheHits() {
return stats.blockCacheHit;
}
/** @return total number of discrete blocks read from pack file(s). */
public long getReadBlocksCount() {
return stats.readBlock;
}
/** @return total number of compressed bytes read as block sized units. */
public long getReadBlocksBytes() {
return stats.readBlockBytes;
}
/** @return total microseconds spent reading blocks. */
public long getReadBlocksMicros() {
return stats.readBlockMicros;
}
/** @return total number of bytes decompressed. */
public long getInflatedBytes() {
return stats.inflatedBytes;
}
}
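
Note: a short usage sketch for the new counters, assuming the post-merge API; InMemoryRepository stands in for a real DFS backend:

import org.eclipse.jgit.internal.storage.dfs.DfsReader;
import org.eclipse.jgit.internal.storage.dfs.DfsReaderIoStats;
import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;

class IoStatsExample {
	public static void main(String[] args) {
		InMemoryRepository repo = new InMemoryRepository(
				new DfsRepositoryDescription("demo"));
		// newReader() now returns DfsReader directly; no cast needed.
		try (DfsReader ctx = repo.getObjectDatabase().newReader()) {
			// ... resolve and open objects through ctx ...
			DfsReaderIoStats io = ctx.getIoStats();
			System.out.println("pack indexes read: " + io.getReadPackIndexCount());
			System.out.println("blocks read:       " + io.getReadBlocksCount());
			System.out.println("block cache hits:  " + io.getBlockCacheHits());
			System.out.println("bytes inflated:    " + io.getInflatedBytes());
		}
	}
}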

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java (2)

@@ -99,7 +99,7 @@ final class LargePackedWholeObject extends ObjectLoader {
 	@Override
 	public ObjectStream openStream() throws MissingObjectException, IOException {
-		DfsReader ctx = new DfsReader(db);
+		DfsReader ctx = db.newReader();
 		InputStream in;
 		try {
 			in = new PackInputStream(pack, objectOffset + headerLength, ctx);

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java (45)

@@ -729,7 +729,9 @@ public class GC {
 		long time = System.currentTimeMillis();
 		Collection<Ref> refsBefore = getAllRefs();

+		Set<ObjectId> allHeadsAndTags = new HashSet<>();
 		Set<ObjectId> allHeads = new HashSet<>();
+		Set<ObjectId> allTags = new HashSet<>();
 		Set<ObjectId> nonHeads = new HashSet<>();
 		Set<ObjectId> txnHeads = new HashSet<>();
 		Set<ObjectId> tagTargets = new HashSet<>();

@@ -739,16 +741,21 @@ public class GC {
 		for (Ref ref : refsBefore) {
 			checkCancelled();
 			nonHeads.addAll(listRefLogObjects(ref, 0));
-			if (ref.isSymbolic() || ref.getObjectId() == null)
+			if (ref.isSymbolic() || ref.getObjectId() == null) {
 				continue;
-			if (isHead(ref) || isTag(ref))
+			}
+			if (isHead(ref)) {
 				allHeads.add(ref.getObjectId());
-			else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+			} else if (isTag(ref)) {
+				allTags.add(ref.getObjectId());
+			} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
 				txnHeads.add(ref.getObjectId());
-			else
+			} else {
 				nonHeads.add(ref.getObjectId());
-			if (ref.getPeeledObjectId() != null)
+			}
+			if (ref.getPeeledObjectId() != null) {
 				tagTargets.add(ref.getPeeledObjectId());
+			}
 		}

 		List<ObjectIdSet> excluded = new LinkedList<>();

@@ -758,13 +765,19 @@ public class GC {
 			excluded.add(f.getIndex());
 		}

-		tagTargets.addAll(allHeads);
+		// Don't exclude tags that are also branch tips
+		allTags.removeAll(allHeads);
+		allHeadsAndTags.addAll(allHeads);
+		allHeadsAndTags.addAll(allTags);
+
+		// Hoist all branch tips and tags earlier in the pack file
+		tagTargets.addAll(allHeadsAndTags);
 		nonHeads.addAll(indexObjects);

 		List<PackFile> ret = new ArrayList<>(2);
 		PackFile heads = null;
-		if (!allHeads.isEmpty()) {
-			heads = writePack(allHeads, Collections.<ObjectId> emptySet(),
+		if (!allHeadsAndTags.isEmpty()) {
+			heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
 					tagTargets, excluded);
 			if (heads != null) {
 				ret.add(heads);

@@ -772,12 +785,14 @@ public class GC {
 			}
 		}
 		if (!nonHeads.isEmpty()) {
-			PackFile rest = writePack(nonHeads, allHeads, tagTargets, excluded);
+			PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
+					tagTargets, excluded);
 			if (rest != null)
 				ret.add(rest);
 		}
 		if (!txnHeads.isEmpty()) {
-			PackFile txn = writePack(txnHeads, PackWriter.NONE, null, excluded);
+			PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
+					null, excluded);
 			if (txn != null)
 				ret.add(txn);
 		}

@@ -961,8 +976,9 @@ public class GC {
 	}

 	private PackFile writePack(@NonNull Set<? extends ObjectId> want,
-			@NonNull Set<? extends ObjectId> have, Set<ObjectId> tagTargets,
-			List<ObjectIdSet> excludeObjects) throws IOException {
+			@NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
+			Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
+			throws IOException {
 		checkCancelled();
 		File tmpPack = null;
 		Map<PackExt, File> tmpExts = new TreeMap<>(

@@ -988,12 +1004,13 @@ public class GC {
 			// prepare the PackWriter
 			pw.setDeltaBaseAsOffset(true);
 			pw.setReuseDeltaCommits(false);
-			if (tagTargets != null)
+			if (tagTargets != null) {
 				pw.setTagTargets(tagTargets);
+			}
 			if (excludeObjects != null)
 				for (ObjectIdSet idx : excludeObjects)
 					pw.excludeObjects(idx);
-			pw.preparePack(pm, want, have);
+			pw.preparePack(pm, want, have, PackWriter.NONE, tags);
 			if (pw.getObjectCount() == 0)
 				return null;
 			checkCancelled();

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java (96)

@@ -233,7 +233,9 @@ public class PackWriter implements AutoCloseable {
 	private List<CachedPack> cachedPacks = new ArrayList<>(2);

-	private Set<ObjectId> tagTargets = Collections.emptySet();
+	private Set<ObjectId> tagTargets = NONE;
+
+	private Set<? extends ObjectId> excludeFromBitmapSelection = NONE;

 	private ObjectIdSet[] excludeInPacks;

@@ -712,8 +714,7 @@ public class PackWriter implements AutoCloseable {
 	public void preparePack(ProgressMonitor countingMonitor,
 			@NonNull Set<? extends ObjectId> want,
 			@NonNull Set<? extends ObjectId> have) throws IOException {
-		preparePack(countingMonitor,
-				want, have, Collections.<ObjectId> emptySet());
+		preparePack(countingMonitor, want, have, NONE, NONE);
 	}

@@ -721,9 +722,9 @@ public class PackWriter implements AutoCloseable {
 	 * <p>
 	 * Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
 	 * specifying commits that should not be walked past ("shallow" commits).
-	 * The caller is responsible for filtering out commits that should not
-	 * be shallow any more ("unshallow" commits as in {@link #setShallowPack})
-	 * from the shallow set.
+	 * The caller is responsible for filtering out commits that should not be
+	 * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+	 * the shallow set.
 	 *
 	 * @param countingMonitor
 	 *            progress during object enumeration.

@@ -731,27 +732,67 @@ public class PackWriter implements AutoCloseable {
 	 *            objects of interest, ancestors of which will be included in
 	 *            the pack. Must not be {@code null}.
 	 * @param have
-	 *            objects whose ancestors (up to and including
-	 *            {@code shallow} commits) do not need to be included in the
-	 *            pack because they are already available from elsewhere.
-	 *            Must not be {@code null}.
+	 *            objects whose ancestors (up to and including {@code shallow}
+	 *            commits) do not need to be included in the pack because they
+	 *            are already available from elsewhere. Must not be
+	 *            {@code null}.
 	 * @param shallow
-	 *            commits indicating the boundary of the history marked with
-	 *            {@code have}. Shallow commits have parents but those
-	 *            parents are considered not to be already available.
-	 *            Parents of {@code shallow} commits and earlier generations
-	 *            will be included in the pack if requested by {@code want}.
-	 *            Must not be {@code null}.
+	 *            commits indicating the boundary of the history marked with
+	 *            {@code have}. Shallow commits have parents but those parents
+	 *            are considered not to be already available. Parents of
+	 *            {@code shallow} commits and earlier generations will be
+	 *            included in the pack if requested by {@code want}. Must not be
+	 *            {@code null}.
 	 * @throws IOException
-	 *             an I/O problem occured while reading objects.
+	 *             an I/O problem occurred while reading objects.
 	 */
 	public void preparePack(ProgressMonitor countingMonitor,
 			@NonNull Set<? extends ObjectId> want,
 			@NonNull Set<? extends ObjectId> have,
 			@NonNull Set<? extends ObjectId> shallow) throws IOException {
+		preparePack(countingMonitor, want, have, shallow, NONE);
+	}
+
+	/**
+	 * Prepare the list of objects to be written to the pack stream.
+	 * <p>
+	 * Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
+	 * specifying commits that should not be walked past ("shallow" commits).
+	 * The caller is responsible for filtering out commits that should not be
+	 * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+	 * the shallow set.
+	 *
+	 * @param countingMonitor
+	 *            progress during object enumeration.
+	 * @param want
+	 *            objects of interest, ancestors of which will be included in
+	 *            the pack. Must not be {@code null}.
+	 * @param have
+	 *            objects whose ancestors (up to and including {@code shallow}
+	 *            commits) do not need to be included in the pack because they
+	 *            are already available from elsewhere. Must not be
+	 *            {@code null}.
+	 * @param shallow
+	 *            commits indicating the boundary of the history marked with
+	 *            {@code have}. Shallow commits have parents but those parents
+	 *            are considered not to be already available. Parents of
+	 *            {@code shallow} commits and earlier generations will be
+	 *            included in the pack if requested by {@code want}. Must not be
+	 *            {@code null}.
+	 * @param noBitmaps
+	 *            collection of objects to be excluded from bitmap commit
+	 *            selection.
+	 * @throws IOException
+	 *             an I/O problem occurred while reading objects.
+	 */
+	public void preparePack(ProgressMonitor countingMonitor,
+			@NonNull Set<? extends ObjectId> want,
+			@NonNull Set<? extends ObjectId> have,
+			@NonNull Set<? extends ObjectId> shallow,
+			@NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
 		try (ObjectWalk ow = getObjectWalk()) {
 			ow.assumeShallow(shallow);
-			preparePack(countingMonitor, ow, want, have);
+			preparePack(countingMonitor, ow, want, have, noBitmaps);
 		}
 	}

@@ -784,13 +825,17 @@ public class PackWriter implements AutoCloseable {
 	 *            points of graph traversal). Pass {@link #NONE} if all objects
 	 *            reachable from {@code want} are desired, such as when serving
 	 *            a clone.
+	 * @param noBitmaps
+	 *            collection of objects to be excluded from bitmap commit
+	 *            selection.
 	 * @throws IOException
 	 *             when some I/O problem occur during reading objects.
 	 */
 	public void preparePack(ProgressMonitor countingMonitor,
 			@NonNull ObjectWalk walk,
 			@NonNull Set<? extends ObjectId> interestingObjects,
-			@NonNull Set<? extends ObjectId> uninterestingObjects)
+			@NonNull Set<? extends ObjectId> uninterestingObjects,
+			@NonNull Set<? extends ObjectId> noBitmaps)
 			throws IOException {
 		if (countingMonitor == null)
 			countingMonitor = NullProgressMonitor.INSTANCE;

@@ -798,7 +843,7 @@ public class PackWriter implements AutoCloseable {
 			throw new IllegalArgumentException(
 					JGitText.get().shallowPacksRequireDepthWalk);
 		findObjectsToPack(countingMonitor, walk, interestingObjects,
-				uninterestingObjects);
+				uninterestingObjects, noBitmaps);
 	}

@@ -965,8 +1010,9 @@ public class PackWriter implements AutoCloseable {
 	/**
 	 * Write the prepared pack to the supplied stream.
 	 * <p>
-	 * Called after {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set)}
-	 * or {@link #preparePack(ProgressMonitor, Set, Set)}.
+	 * Called after
+	 * {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set, Set)} or
+	 * {@link #preparePack(ProgressMonitor, Set, Set)}.
 	 * <p>
 	 * Performs delta search if enabled and writes the pack stream.
 	 * <p>

@@ -1652,12 +1698,14 @@ public class PackWriter implements AutoCloseable {
 	private void findObjectsToPack(@NonNull ProgressMonitor countingMonitor,
 			@NonNull ObjectWalk walker, @NonNull Set<? extends ObjectId> want,
-			@NonNull Set<? extends ObjectId> have) throws IOException {
+			@NonNull Set<? extends ObjectId> have,
+			@NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
 		final long countingStart = System.currentTimeMillis();
 		beginPhase(PackingPhase.COUNTING, countingMonitor, ProgressMonitor.UNKNOWN);

 		stats.interestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(want));
 		stats.uninterestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(have));
+		excludeFromBitmapSelection = noBitmaps;

 		canBuildBitmaps = config.isBuildBitmaps()
 				&& !shallowPack

@@ -2070,8 +2118,8 @@ public class PackWriter implements AutoCloseable {
 		PackWriterBitmapPreparer bitmapPreparer = new PackWriterBitmapPreparer(
 				reader, writeBitmaps, pm, stats.interestingObjects, config);

-		Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits =
-				bitmapPreparer.selectCommits(numCommits);
+		Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits = bitmapPreparer
+				.selectCommits(numCommits, excludeFromBitmapSelection);

 		beginPhase(PackingPhase.BUILDING_BITMAPS, pm, selectedCommits.size());
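
Note: a hedged sketch of the new five-argument overload, mirroring how the GC changes above keep tag tips out of bitmap commit selection; the helper and variable names are illustrative only:

import java.io.IOException;
import java.util.Set;

import org.eclipse.jgit.internal.storage.pack.PackWriter;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;

class PreparePackExample {
	// Packs everything reachable from heads and tags, but excludes tag tips
	// from bitmap commit selection (refs/tags rarely benefit from bitmaps).
	static void prepareHeadsPack(Repository repo,
			Set<ObjectId> allHeadsAndTags, Set<ObjectId> allTags)
			throws IOException {
		try (PackWriter pw = new PackWriter(repo)) {
			pw.preparePack(NullProgressMonitor.INSTANCE, allHeadsAndTags,
					PackWriter.NONE, PackWriter.NONE, allTags);
			// ... then write the pack and index via pw.writePack(...) ...
		}
	}
}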

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java (18)

@@ -141,6 +141,8 @@ class PackWriterBitmapPreparer {
 	 *
 	 * @param expectedCommitCount
 	 *            count of commits in the pack
+	 * @param excludeFromBitmapSelection
+	 *            commits that should be excluded from bitmap selection
 	 * @return commit objects for which bitmap indices should be built
 	 * @throws IncorrectObjectTypeException
 	 *             if any of the processed objects is not a commit

@@ -149,7 +151,8 @@ class PackWriterBitmapPreparer {
 	 * @throws MissingObjectException
 	 *             if an expected object is missing
 	 */
-	Collection<BitmapCommit> selectCommits(int expectedCommitCount)
+	Collection<BitmapCommit> selectCommits(int expectedCommitCount,
+			Set<? extends ObjectId> excludeFromBitmapSelection)
 			throws IncorrectObjectTypeException, IOException,
 			MissingObjectException {
 		/*

@@ -164,7 +167,7 @@ class PackWriterBitmapPreparer {
 		RevWalk rw = new RevWalk(reader);
 		rw.setRetainBody(false);
 		CommitSelectionHelper selectionHelper = setupTipCommitBitmaps(rw,
-				expectedCommitCount);
+				expectedCommitCount, excludeFromBitmapSelection);
 		pm.endTask();

 		int totCommits = selectionHelper.getCommitCount();

@@ -363,6 +366,8 @@ class PackWriterBitmapPreparer {
 	 * @param expectedCommitCount
 	 *            expected count of commits. The actual count may be less due to
 	 *            unreachable garbage.
+	 * @param excludeFromBitmapSelection
+	 *            commits that should be excluded from bitmap selection
 	 * @return a {@link CommitSelectionHelper} containing bitmaps for the tip
 	 *         commits
 	 * @throws IncorrectObjectTypeException

@@ -373,8 +378,10 @@ class PackWriterBitmapPreparer {
 	 *             if an expected object is missing
 	 */
 	private CommitSelectionHelper setupTipCommitBitmaps(RevWalk rw,
-			int expectedCommitCount) throws IncorrectObjectTypeException,
-			IOException, MissingObjectException {
+			int expectedCommitCount,
+			Set<? extends ObjectId> excludeFromBitmapSelection)
+			throws IncorrectObjectTypeException, IOException,
+			MissingObjectException {
 		BitmapBuilder reuse = commitBitmapIndex.newBitmapBuilder();
 		List<BitmapCommit> reuseCommits = new ArrayList<>();
 		for (PackBitmapIndexRemapper.Entry entry : bitmapRemapper) {

@@ -403,7 +410,8 @@ class PackWriterBitmapPreparer {
 		Set<RevCommit> peeledWant = new HashSet<>(want.size());
 		for (AnyObjectId objectId : want) {
 			RevObject ro = rw.peel(rw.parseAny(objectId));
-			if (!(ro instanceof RevCommit) || reuse.contains(ro)) {
+			if (!(ro instanceof RevCommit) || reuse.contains(ro)
+					|| excludeFromBitmapSelection.contains(ro)) {
 				continue;
 			}

org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java (2)

@@ -1523,7 +1523,7 @@ public class UploadPack {
 			walk.reset();

 			ObjectWalk ow = rw.toObjectWalkWithSameObjects();
-			pw.preparePack(pm, ow, wantAll, commonBase);
+			pw.preparePack(pm, ow, wantAll, commonBase, PackWriter.NONE);
 			rw = ow;
 		}

org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java (8)

@@ -497,7 +497,13 @@ public abstract class FS {
 		if (env != null) {
 			pb.environment().putAll(env);
 		}
-		Process p = pb.start();
+		Process p;
+		try {
+			p = pb.start();
+		} catch (IOException e) {
+			// Process failed to start
+			throw new CommandFailedException(-1, e.getMessage(), e);
+		}
 		p.getOutputStream().close();
 		GobblerThread gobbler = new GobblerThread(p, command, dir);
 		gobbler.start();
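
Note: readPipe is package-private, so only callers inside org.eclipse.jgit.util see this change; a hedged sketch of the new contract (a launch failure now surfaces as CommandFailedException rather than a null return; the wrapper class below is invented, and the exception's package is assumed to be org.eclipse.jgit.errors):

package org.eclipse.jgit.util;

import java.nio.charset.Charset;

import org.eclipse.jgit.errors.CommandFailedException;

class ReadPipeExample {
	// Callers no longer need to distinguish a null result from a failed
	// launch; both error paths now raise CommandFailedException.
	static String tryCommand(FS fs, String... command) {
		try {
			return FS.readPipe(fs.userHome(), command,
					Charset.defaultCharset().name());
		} catch (CommandFailedException e) {
			return null; // command missing or exited with an error
		}
	}
}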
