
Merge branch 'master' into stable-4.8

* master:
  Fix out-of-bounds exception in RepoCommand#relative
  Fix null return from FS.readPipe when command fails to launch
  RenameDetector: Clarify rename limits <= 0
  Remove unnecessary cast for DfsReader
  Allow DfsReader to be subclassed
  Track read IO for DfsReader
  Fix javadoc of TooLargeObjectInPackException
  Exclude refs/tags from bitmap commit selection

Change-Id: I9cd20ded108d2e5d81fa1f0c2cb9aa0eabe1f256
Branch: stable-4.8
Matthias Sohn, 8 years ago
Commit: 7e1a11f292
 org.eclipse.jgit.test/tst/org/eclipse/jgit/gitrepo/RepoCommandTest.java | 1
 org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java | 2
 org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java | 2
 org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java | 16
 org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java | 15
 org.eclipse.jgit/src/org/eclipse/jgit/api/errors/TooLargeObjectInPackException.java | 3
 org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java | 4
 org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java | 31
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java | 41
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java | 4
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java | 22
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java | 33
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java | 139
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java | 45
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java | 96
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java | 18
 org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java | 8
 22 files changed, 490 lines changed

org.eclipse.jgit.test/tst/org/eclipse/jgit/gitrepo/RepoCommandTest.java | 1

@@ -1120,6 +1120,7 @@ public class RepoCommandTest extends RepositoryTestCase {
     testRelative("a/", "a/b", "b");
     testRelative("/a/b/c", "/b/c", "../../b/c");
     testRelative("/abc", "bcd", "bcd");
+    testRelative("abc", "def", "def");
     testRelative("abc", "/bcd", "/bcd");
     testRelative("http://a", "a/b", "a/b");
     testRelative("http://base.com/a/", "http://child.com/a/b", "http://child.com/a/b");

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java | 2

@@ -674,7 +674,7 @@ public class DfsGarbageCollectorTest {
 private boolean isObjectInPack(AnyObjectId id, DfsPackFile pack)
         throws IOException {
-    try (DfsReader reader = new DfsReader(odb)) {
+    try (DfsReader reader = odb.newReader()) {
         return pack.hasObject(reader, id);
     }
 }

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java | 2

@@ -711,7 +711,7 @@ public class PackWriterTest extends SampleDataRepositoryTestCase {
 }
 ObjectWalk ow = walk.toObjectWalkWithSameObjects();
-pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have);
+pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have, NONE);
 String id = pw.computeName().getName();
 File packdir = new File(repo.getObjectsDirectory(), "pack");
 File packFile = new File(packdir, "pack-" + id + ".pack");

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java | 16

@@ -69,6 +69,15 @@ public class GcCommitSelectionTest extends GcTestCase {
 @Test
 public void testBitmapSpansNoMerges() throws Exception {
+    testBitmapSpansNoMerges(false);
+}
+
+@Test
+public void testBitmapSpansNoMergesWithTags() throws Exception {
+    testBitmapSpansNoMerges(true);
+}
+
+private void testBitmapSpansNoMerges(boolean withTags) throws Exception {
     /*
      * Commit counts -> expected bitmap counts for history without merges.
      * The top 100 contiguous commits should always have bitmaps, and the
@@ -89,7 +98,10 @@ public class GcCommitSelectionTest extends GcTestCase {
     assertTrue(nextCommitCount > currentCommits); // programming error
     for (int i = currentCommits; i < nextCommitCount; i++) {
         String str = "A" + i;
-        bb.commit().message(str).add(str, str).create();
+        RevCommit rc = bb.commit().message(str).add(str, str).create();
+        if (withTags) {
+            tr.lightweightTag(str, rc);
+        }
     }
     currentCommits = nextCommitCount;
@@ -233,7 +245,7 @@ public class GcCommitSelectionTest extends GcTestCase {
             m8, m9);
     PackWriterBitmapPreparer preparer = newPeparer(m9, commits);
     List<BitmapCommit> selection = new ArrayList<>(
-            preparer.selectCommits(commits.size()));
+            preparer.selectCommits(commits.size(), PackWriter.NONE));
     // Verify that the output is ordered by the separate "chains"
     String[] expected = { m0.name(), m1.name(), m2.name(), m4.name(),

org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java | 15

@@ -172,9 +172,18 @@ public class FSTest {
 FS fs = FS.DETECTED.newInstance();
 assumeTrue(fs instanceof FS_POSIX);
-String r = FS.readPipe(fs.userHome(),
-        new String[] { "bash", "--login", "-c", "foobar" },
+FS.readPipe(fs.userHome(),
+        new String[] { "/bin/sh", "-c", "exit 1" },
         Charset.defaultCharset().name());
-System.out.println(r);
 }
+
+@Test(expected = CommandFailedException.class)
+public void testReadPipeCommandStartFailure()
+        throws CommandFailedException {
+    FS fs = FS.DETECTED.newInstance();
+    FS.readPipe(fs.userHome(),
+            new String[] { "this-command-does-not-exist" },
+            Charset.defaultCharset().name());
+}
 }

org.eclipse.jgit/src/org/eclipse/jgit/api/errors/TooLargeObjectInPackException.java | 3

@@ -38,7 +38,8 @@
 package org.eclipse.jgit.api.errors;

 /**
- * Exception thrown when the server rejected a too large pack
+ * Exception thrown when PackParser finds an object larger than a predefined
+ * limit
  *
  * @since 4.4
  */

org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java | 4

@@ -220,7 +220,9 @@ public class RenameDetector {
  * must be allocated, and 1,000,000 file compares may need to be performed.
  *
  * @param limit
- *            new file limit.
+ *            new file limit. 0 means no limit; a negative number means no
+ *            inexact rename detection will be performed, only exact rename
+ *            detection.
  */
 public void setRenameLimit(int limit) {
     renameLimit = limit;

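For context, here is a minimal sketch of the clarified semantics from a caller's point of view. It assumes an existing Repository `repo` and a List<DiffEntry> `entries` produced by surrounding code:

    // Sketch only: exercises the documented setRenameLimit() semantics.
    RenameDetector rd = new RenameDetector(repo);
    rd.addAll(entries);
    rd.setRenameLimit(0);   // 0: no limit on inexact rename detection
    // A negative limit would skip inexact (content-based) detection and
    // report only exact renames (identical blobs moved to a new path).
    List<DiffEntry> renames = rd.compute();
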
org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java | 31

@@ -731,7 +731,9 @@ public class RepoCommand extends GitCommand<RevCommit> {
  * Returns the child if either base or child is not a bare path. This provides a missing feature in
  * java.net.URI (see http://bugs.java.com/view_bug.do?bug_id=6226081).
  */
+private static final String SLASH = "/"; //$NON-NLS-1$
+
 static URI relativize(URI current, URI target) {
     // We only handle bare paths for now.
     if (!target.toString().equals(target.getPath())) {
         return target;
@@ -744,37 +746,46 @@
     String dest = target.normalize().getPath();
     // TODO(hanwen): maybe (absolute, relative) should throw an exception.
-    if (cur.startsWith("/") != dest.startsWith("/")) { //$NON-NLS-1$//$NON-NLS-2$
+    if (cur.startsWith(SLASH) != dest.startsWith(SLASH)) {
         return target;
     }

-    while (cur.startsWith("/")) { //$NON-NLS-1$
+    while (cur.startsWith(SLASH)) {
         cur = cur.substring(1);
     }
-    while (dest.startsWith("/")) { //$NON-NLS-1$
+    while (dest.startsWith(SLASH)) {
         dest = dest.substring(1);
     }

-    if (!cur.endsWith("/")) { //$NON-NLS-1$
+    if (cur.indexOf('/') == -1 || dest.indexOf('/') == -1) {
+        // Avoid having to special-casing in the next two ifs.
+        String prefix = "prefix/"; //$NON-NLS-1$
+        cur = prefix + cur;
+        dest = prefix + dest;
+    }
+
+    if (!cur.endsWith(SLASH)) {
         // The current file doesn't matter.
-        cur = cur.substring(0, cur.lastIndexOf('/'));
+        int lastSlash = cur.lastIndexOf('/');
+        cur = cur.substring(0, lastSlash);
     }
     String destFile = ""; //$NON-NLS-1$
-    if (!dest.endsWith("/")) { //$NON-NLS-1$
+    if (!dest.endsWith(SLASH)) {
         // We always have to provide the destination file.
-        destFile = dest.substring(dest.lastIndexOf('/') + 1, dest.length());
+        int lastSlash = dest.lastIndexOf('/');
+        destFile = dest.substring(lastSlash + 1, dest.length());
         dest = dest.substring(0, dest.lastIndexOf('/'));
     }

-    String[] cs = cur.split("/"); //$NON-NLS-1$
-    String[] ds = dest.split("/"); //$NON-NLS-1$
+    String[] cs = cur.split(SLASH);
+    String[] ds = dest.split(SLASH);
     int common = 0;
     while (common < cs.length && common < ds.length && cs[common].equals(ds[common])) {
         common++;
     }

-    StringJoiner j = new StringJoiner("/"); //$NON-NLS-1$
+    StringJoiner j = new StringJoiner(SLASH);
     for (int i = common; i < cs.length; i++) {
         j.add(".."); //$NON-NLS-1$
     }

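The "prefix/" padding above is what fixes the out-of-bounds exception. For a bare path with no slash, lastIndexOf('/') returns -1, and the old code then called substring(0, -1). A minimal illustration of the failure mode:

    // Before the fix (sketch): a bare path such as "abc" contains no '/'.
    String cur = "abc";
    int lastSlash = cur.lastIndexOf('/');   // -1
    // cur.substring(0, lastSlash) -> StringIndexOutOfBoundsException
    // Prepending "prefix/" to both paths guarantees lastIndexOf('/') >= 0,
    // and the shared "prefix" segment cancels out in the common-prefix count.
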
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java | 2

@@ -322,6 +322,7 @@ public final class DfsBlockCache {
     HashEntry e1 = table.get(slot);
     DfsBlock v = scan(e1, key, position);
     if (v != null) {
+        ctx.stats.blockCacheHit++;
         statHit.incrementAndGet();
         return v;
     }
@@ -334,6 +335,7 @@
         if (e2 != e1) {
             v = scan(e2, key, position);
             if (v != null) {
+                ctx.stats.blockCacheHit++;
                 statHit.incrementAndGet();
                 creditSpace(blockSize);
                 return v;

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java | 41

@@ -53,6 +53,7 @@ import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.UNREACHABLE_GARBAGE;
 import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
 import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
 import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+import static org.eclipse.jgit.internal.storage.pack.PackWriter.NONE;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -111,7 +112,8 @@ public class DfsGarbageCollector {
 private List<DfsPackFile> packsBefore;
 private List<DfsPackFile> expiredGarbagePacks;

-private Set<ObjectId> allHeads;
+private Set<ObjectId> allHeadsAndTags;
+private Set<ObjectId> allTags;
 private Set<ObjectId> nonHeads;
 private Set<ObjectId> txnHeads;
 private Set<ObjectId> tagTargets;
@@ -233,7 +235,7 @@
             JGitText.get().supportOnlyPackIndexVersion2);

     startTimeMillis = SystemReader.getInstance().getCurrentTime();
-    ctx = (DfsReader) objdb.newReader();
+    ctx = objdb.newReader();
     try {
         refdb.refresh();
         objdb.clearCache();
@@ -241,23 +243,36 @@
         Collection<Ref> refsBefore = getAllRefs();
         readPacksBefore();

-        allHeads = new HashSet<>();
+        Set<ObjectId> allHeads = new HashSet<>();
+        allHeadsAndTags = new HashSet<>();
+        allTags = new HashSet<>();
         nonHeads = new HashSet<>();
         txnHeads = new HashSet<>();
         tagTargets = new HashSet<>();
         for (Ref ref : refsBefore) {
-            if (ref.isSymbolic() || ref.getObjectId() == null)
+            if (ref.isSymbolic() || ref.getObjectId() == null) {
                 continue;
-            if (isHead(ref) || isTag(ref))
+            }
+            if (isHead(ref)) {
                 allHeads.add(ref.getObjectId());
-            else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+            } else if (isTag(ref)) {
+                allTags.add(ref.getObjectId());
+            } else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
                 txnHeads.add(ref.getObjectId());
-            else
+            } else {
                 nonHeads.add(ref.getObjectId());
-            if (ref.getPeeledObjectId() != null)
+            }
+            if (ref.getPeeledObjectId() != null) {
                 tagTargets.add(ref.getPeeledObjectId());
+            }
         }
-        tagTargets.addAll(allHeads);
+
+        // Don't exclude tags that are also branch tips.
+        allTags.removeAll(allHeads);
+        allHeadsAndTags.addAll(allHeads);
+        allHeadsAndTags.addAll(allTags);
+
+        // Hoist all branch tips and tags earlier in the pack file
+        tagTargets.addAll(allHeadsAndTags);

         boolean rollback = true;
         try {
@@ -413,12 +428,12 @@
 }

 private void packHeads(ProgressMonitor pm) throws IOException {
-    if (allHeads.isEmpty())
+    if (allHeadsAndTags.isEmpty())
         return;

     try (PackWriter pw = newPackWriter()) {
         pw.setTagTargets(tagTargets);
-        pw.preparePack(pm, allHeads, PackWriter.NONE);
+        pw.preparePack(pm, allHeadsAndTags, NONE, NONE, allTags);
         if (0 < pw.getObjectCount())
             writePack(GC, pw, pm,
                     estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC));
@@ -432,7 +447,7 @@
     try (PackWriter pw = newPackWriter()) {
         for (ObjectIdSet packedObjs : newPackObj)
             pw.excludeObjects(packedObjs);
-        pw.preparePack(pm, nonHeads, allHeads);
+        pw.preparePack(pm, nonHeads, allHeadsAndTags);
         if (0 < pw.getObjectCount())
             writePack(GC_REST, pw, pm,
                     estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC_REST));
@@ -446,7 +461,7 @@
     try (PackWriter pw = newPackWriter()) {
         for (ObjectIdSet packedObjs : newPackObj)
             pw.excludeObjects(packedObjs);
-        pw.preparePack(pm, txnHeads, PackWriter.NONE);
+        pw.preparePack(pm, txnHeads, NONE);
         if (0 < pw.getObjectCount())
             writePack(GC_TXN, pw, pm, 0 /* unknown pack size */);
     }

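The set arithmetic above carries the "exclude refs/tags from bitmap commit selection" change. A small sketch with hypothetical ids (idA, idB, idC are stand-ins, not values from this commit):

    // idB is both a branch tip and a tag target.
    Set<ObjectId> allHeads = new HashSet<>(Arrays.asList(idA, idB));
    Set<ObjectId> allTags = new HashSet<>(Arrays.asList(idB, idC));
    allTags.removeAll(allHeads);       // idB stays bitmap-eligible as a head
    Set<ObjectId> allHeadsAndTags = new HashSet<>(allHeads);
    allHeadsAndTags.addAll(allTags);   // the GC pack is built from this set
    // allTags (now only idC) is what packHeads() passes to preparePack()
    // as the noBitmaps argument.
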
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java | 4

@@ -530,7 +530,7 @@ public class DfsInserter extends ObjectInserter {
 }

 private class Reader extends ObjectReader {
-    private final DfsReader ctx = new DfsReader(db);
+    private final DfsReader ctx = db.newReader();

     @Override
     public ObjectReader newReader() {
@@ -647,7 +647,7 @@ public class DfsInserter extends ObjectInserter {
     @Override
     public ObjectStream openStream() throws IOException {
-        final DfsReader ctx = new DfsReader(db);
+        final DfsReader ctx = db.newReader();
         if (srcPack != packKey) {
             try {
                 // Post DfsInserter.flush() use the normal code path.

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java | 2

@@ -170,7 +170,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
 }

 @Override
-public ObjectReader newReader() {
+public DfsReader newReader() {
     return new DfsReader(this);
 }

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java | 2

@@ -201,7 +201,7 @@ public class DfsPackCompactor {
     pm = NullProgressMonitor.INSTANCE;

 DfsObjDatabase objdb = repo.getObjectDatabase();
-try (DfsReader ctx = (DfsReader) objdb.newReader()) {
+try (DfsReader ctx = objdb.newReader()) {
     PackConfig pc = new PackConfig(repo);
     pc.setIndexVersion(2);
     pc.setDeltaCompress(false);

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java | 22

@@ -251,6 +251,8 @@ public final class DfsPackFile {
 PackIndex idx;
 try {
+    ctx.stats.readIdx++;
+    long start = System.nanoTime();
     ReadableChannel rc = ctx.db.openFile(packDesc, INDEX);
     try {
         InputStream in = Channels.newInputStream(rc);
@@ -260,10 +262,11 @@
             bs = (wantSize / bs) * bs;
         else if (bs <= 0)
             bs = wantSize;
-        in = new BufferedInputStream(in, bs);
-        idx = PackIndex.read(in);
+        idx = PackIndex.read(new BufferedInputStream(in, bs));
+        ctx.stats.readIdxBytes += rc.position();
     } finally {
         rc.close();
+        ctx.stats.readIdxMicros += elapsedMicros(start);
     }
 } catch (EOFException e) {
     invalid = true;
@@ -286,6 +289,10 @@
     }
 }

+private static long elapsedMicros(long start) {
+    return (System.nanoTime() - start) / 1000L;
+}
+
 final boolean isGarbage() {
     return packDesc.getPackSource() == UNREACHABLE_GARBAGE;
 }
@@ -314,6 +321,8 @@
 long size;
 PackBitmapIndex idx;
 try {
+    ctx.stats.readBitmap++;
+    long start = System.nanoTime();
     ReadableChannel rc = ctx.db.openFile(packDesc, BITMAP_INDEX);
     try {
         InputStream in = Channels.newInputStream(rc);
@@ -329,6 +338,8 @@
     } finally {
         size = rc.position();
         rc.close();
+        ctx.stats.readIdxBytes += size;
+        ctx.stats.readIdxMicros += elapsedMicros(start);
     }
 } catch (EOFException e) {
     IOException e2 = new IOException(MessageFormat.format(
@@ -777,6 +788,8 @@
 if (invalid)
     throw new PackInvalidException(getPackName());

+ctx.stats.readBlock++;
+long start = System.nanoTime();
 ReadableChannel rc = ctx.db.openFile(packDesc, PACK);
 try {
     int size = blockSize(rc);
@@ -803,6 +816,7 @@
     byte[] buf = new byte[size];
     rc.position(pos);
     int cnt = read(rc, ByteBuffer.wrap(buf, 0, size));
+    ctx.stats.readBlockBytes += cnt;
     if (cnt != size) {
         if (0 <= len) {
             throw new EOFException(MessageFormat.format(
@@ -824,10 +838,10 @@
         length = len = rc.size();
     }

-    DfsBlock v = new DfsBlock(key, pos, buf);
-    return v;
+    return new DfsBlock(key, pos, buf);
 } finally {
     rc.close();
+    ctx.stats.readBlockMicros += elapsedMicros(start);
 }
 }

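Every counter added above follows the same accounting pattern: bump the event count, snapshot System.nanoTime() before the IO, and attribute the elapsed microseconds in a finally block so that failed reads are timed as well. In isolation:

    stats.readBlock++;
    long start = System.nanoTime();
    try {
        // ... perform the read ...
    } finally {
        // elapsedMicros(start) is (System.nanoTime() - start) / 1000L
        stats.readBlockMicros += elapsedMicros(start);
    }
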
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java | 33

@@ -95,7 +95,7 @@ import org.eclipse.jgit.util.BlockList;
  * See the base {@link ObjectReader} documentation for details. Notably, a
  * reader is not thread safe.
  */
-public final class DfsReader extends ObjectReader implements ObjectReuseAsIs {
+public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
     private static final int MAX_RESOLVE_MATCHES = 256;

     /** Temporary buffer large enough for at least one raw object id. */
@@ -104,17 +104,21 @@ public final class DfsReader extends ObjectReader implements ObjectReuseAsIs {
 /** Database this reader loads objects from. */
 final DfsObjDatabase db;

-private Inflater inf;
+final DfsReaderIoStats.Accumulator stats = new DfsReaderIoStats.Accumulator();

+private Inflater inf;
 private DfsBlock block;
 private DeltaBaseCache baseCache;
 private DfsPackFile last;
 private boolean avoidUnreachable;

-DfsReader(DfsObjDatabase db) {
+/**
+ * Initialize a new DfsReader
+ *
+ * @param db
+ *            parent DfsObjDatabase.
+ */
+protected DfsReader(DfsObjDatabase db) {
     this.db = db;
     this.streamFileThreshold = db.getReaderOptions().getStreamFileThreshold();
 }
@@ -131,7 +135,7 @@
 @Override
 public ObjectReader newReader() {
-    return new DfsReader(db);
+    return db.newReader();
 }

 @Override
@@ -170,6 +174,7 @@
 PackList packList = db.getPackList();
 resolveImpl(packList, id, matches);
 if (matches.size() < MAX_RESOLVE_MATCHES && packList.dirty()) {
+    stats.scanPacks++;
     resolveImpl(db.scanPacks(packList), id, matches);
 }
 return matches;
@@ -198,6 +203,7 @@
 if (hasImpl(packList, objectId)) {
     return true;
 } else if (packList.dirty()) {
+    stats.scanPacks++;
     return hasImpl(db.scanPacks(packList), objectId);
 }
 return false;
@@ -234,6 +240,7 @@
     return checkType(ldr, objectId, typeHint);
 }
 if (packList.dirty()) {
+    stats.scanPacks++;
     ldr = openImpl(db.scanPacks(packList), objectId);
     if (ldr != null) {
         return checkType(ldr, objectId, typeHint);
@@ -316,6 +323,7 @@
 List<FoundObject<T>> r = new ArrayList<>();
 findAllImpl(packList, pending, r);
 if (!pending.isEmpty() && packList.dirty()) {
+    stats.scanPacks++;
     findAllImpl(db.scanPacks(packList), pending, r);
 }
 for (T t : pending) {
@@ -452,7 +460,6 @@
 final IOException findAllError = error;
 return new AsyncObjectSizeQueue<T>() {
     private FoundObject<T> cur;
-
     private long sz;

     @Override
@@ -718,9 +725,10 @@
 for (int dstoff = 0;;) {
     int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
     dstoff += n;
-    if (inf.finished() || (headerOnly && dstoff == dstbuf.length))
+    if (inf.finished() || (headerOnly && dstoff == dstbuf.length)) {
+        stats.inflatedBytes += dstoff;
         return dstoff;
-    if (inf.needsInput()) {
+    } else if (inf.needsInput()) {
         pin(pack, position);
         position += block.setInput(position, inf);
     } else if (n == 0)
@@ -764,6 +772,11 @@
     block = null;
 }

+/** @return IO statistics accumulated by this reader. */
+public DfsReaderIoStats getIoStats() {
+    return new DfsReaderIoStats(stats);
+}
+
 /** Release the current window cursor. */
 @Override
 public void close() {

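Dropping final and widening the constructor to protected is what "Allow DfsReader to be subclassed" means in practice; combined with the covariant DfsObjDatabase.newReader() above, a DFS backend can hand out its own reader type. A sketch under assumed names (MyDfsReader and MyDfsObjDatabase are hypothetical):

    class MyDfsReader extends DfsReader {
        MyDfsReader(DfsObjDatabase db) {
            super(db);
        }
        // add backend-specific instrumentation or overrides here
    }

    abstract class MyDfsObjDatabase extends DfsObjDatabase {
        MyDfsObjDatabase(DfsRepository repo, DfsReaderOptions options) {
            super(repo, options);
        }

        @Override
        public DfsReader newReader() {
            // Everything that calls newReader(), including
            // DfsReader.newReader() above, now receives the subclass.
            return new MyDfsReader(this);
        }
    }
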
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java | 139 (new file)

@@ -0,0 +1,139 @@
/*
* Copyright (C) 2017, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.internal.storage.dfs;
/** IO statistics for a {@link DfsReader}. */
public class DfsReaderIoStats {
    /** POJO to accumulate IO statistics. */
    public static class Accumulator {
        /** Number of times the reader explicitly called scanPacks. */
        long scanPacks;

        /** Total number of complete pack indexes read into memory. */
        long readIdx;

        /** Total number of complete bitmap indexes read into memory. */
        long readBitmap;

        /** Total number of bytes read from indexes. */
        long readIdxBytes;

        /** Total microseconds spent reading pack or bitmap indexes. */
        long readIdxMicros;

        /** Total number of block cache hits. */
        long blockCacheHit;

        /** Total number of discrete blocks read from pack file(s). */
        long readBlock;

        /** Total number of compressed bytes read as block sized units. */
        long readBlockBytes;

        /** Total microseconds spent reading {@link #readBlock} blocks. */
        long readBlockMicros;

        /** Total number of bytes decompressed. */
        long inflatedBytes;

        Accumulator() {
        }
    }

    private final Accumulator stats;

    DfsReaderIoStats(Accumulator stats) {
        this.stats = stats;
    }

    /** @return number of times the reader explicitly called scanPacks. */
    public long getScanPacks() {
        return stats.scanPacks;
    }

    /** @return total number of complete pack indexes read into memory. */
    public long getReadPackIndexCount() {
        return stats.readIdx;
    }

    /** @return total number of complete bitmap indexes read into memory. */
    public long getReadBitmapIndexCount() {
        return stats.readBitmap;
    }

    /** @return total number of bytes read from indexes. */
    public long getReadIndexBytes() {
        return stats.readIdxBytes;
    }

    /** @return total microseconds spent reading pack or bitmap indexes. */
    public long getReadIndexMicros() {
        return stats.readIdxMicros;
    }

    /** @return total number of block cache hits. */
    public long getBlockCacheHits() {
        return stats.blockCacheHit;
    }

    /** @return total number of discrete blocks read from pack file(s). */
    public long getReadBlocksCount() {
        return stats.readBlock;
    }

    /** @return total number of compressed bytes read as block sized units. */
    public long getReadBlocksBytes() {
        return stats.readBlockBytes;
    }

    /** @return total microseconds spent reading blocks. */
    public long getReadBlocksMicros() {
        return stats.readBlockMicros;
    }

    /** @return total number of bytes decompressed. */
    public long getInflatedBytes() {
        return stats.inflatedBytes;
    }
}

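A sketch of how a caller might read the new counters (it assumes an existing DfsObjDatabase `odb` and an ObjectId `id`; getIoStats() is the accessor added to DfsReader above):

    static void printIoStats(DfsObjDatabase odb, ObjectId id)
            throws IOException {
        try (DfsReader ctx = odb.newReader()) {
            ctx.has(id); // any lookup drives the counters
            DfsReaderIoStats io = ctx.getIoStats();
            System.out.println("blocks read: " + io.getReadBlocksCount());
            System.out.println("cache hits:  " + io.getBlockCacheHits());
            System.out.println("inflated:    " + io.getInflatedBytes());
        }
    }

Note that getIoStats() wraps the live Accumulator rather than copying it, so the returned view also reflects IO performed after the call.
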
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java | 2

@@ -99,7 +99,7 @@ final class LargePackedWholeObject extends ObjectLoader {
 @Override
 public ObjectStream openStream() throws MissingObjectException, IOException {
-    DfsReader ctx = new DfsReader(db);
+    DfsReader ctx = db.newReader();
     InputStream in;
     try {
         in = new PackInputStream(pack, objectOffset + headerLength, ctx);

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java | 45

@@ -729,7 +729,9 @@ public class GC {
 long time = System.currentTimeMillis();
 Collection<Ref> refsBefore = getAllRefs();

+Set<ObjectId> allHeadsAndTags = new HashSet<>();
 Set<ObjectId> allHeads = new HashSet<>();
+Set<ObjectId> allTags = new HashSet<>();
 Set<ObjectId> nonHeads = new HashSet<>();
 Set<ObjectId> txnHeads = new HashSet<>();
 Set<ObjectId> tagTargets = new HashSet<>();
@@ -739,17 +741,22 @@
 for (Ref ref : refsBefore) {
     checkCancelled();
     nonHeads.addAll(listRefLogObjects(ref, 0));
-    if (ref.isSymbolic() || ref.getObjectId() == null)
+    if (ref.isSymbolic() || ref.getObjectId() == null) {
         continue;
-    if (isHead(ref) || isTag(ref))
+    }
+    if (isHead(ref)) {
         allHeads.add(ref.getObjectId());
-    else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+    } else if (isTag(ref)) {
+        allTags.add(ref.getObjectId());
+    } else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
         txnHeads.add(ref.getObjectId());
-    else
+    } else {
         nonHeads.add(ref.getObjectId());
-    if (ref.getPeeledObjectId() != null)
+    }
+    if (ref.getPeeledObjectId() != null) {
         tagTargets.add(ref.getPeeledObjectId());
+    }
 }

 List<ObjectIdSet> excluded = new LinkedList<>();
 for (final PackFile f : repo.getObjectDatabase().getPacks()) {
@@ -758,13 +765,19 @@
         excluded.add(f.getIndex());
 }

-tagTargets.addAll(allHeads);
+// Don't exclude tags that are also branch tips
+allTags.removeAll(allHeads);
+allHeadsAndTags.addAll(allHeads);
+allHeadsAndTags.addAll(allTags);
+
+// Hoist all branch tips and tags earlier in the pack file
+tagTargets.addAll(allHeadsAndTags);
+
 nonHeads.addAll(indexObjects);

 List<PackFile> ret = new ArrayList<>(2);
 PackFile heads = null;
-if (!allHeads.isEmpty()) {
-    heads = writePack(allHeads, Collections.<ObjectId> emptySet(),
+if (!allHeadsAndTags.isEmpty()) {
+    heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
             tagTargets, excluded);
     if (heads != null) {
         ret.add(heads);
@@ -772,12 +785,14 @@
     }
 }
 if (!nonHeads.isEmpty()) {
-    PackFile rest = writePack(nonHeads, allHeads, tagTargets, excluded);
+    PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
+            tagTargets, excluded);
     if (rest != null)
         ret.add(rest);
 }
 if (!txnHeads.isEmpty()) {
-    PackFile txn = writePack(txnHeads, PackWriter.NONE, null, excluded);
+    PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
+            null, excluded);
     if (txn != null)
         ret.add(txn);
 }
@@ -961,8 +976,9 @@
 }

 private PackFile writePack(@NonNull Set<? extends ObjectId> want,
-        @NonNull Set<? extends ObjectId> have, Set<ObjectId> tagTargets,
-        List<ObjectIdSet> excludeObjects) throws IOException {
+        @NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
+        Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
+        throws IOException {
     checkCancelled();
     File tmpPack = null;
     Map<PackExt, File> tmpExts = new TreeMap<>(
@@ -988,12 +1004,13 @@
     // prepare the PackWriter
     pw.setDeltaBaseAsOffset(true);
     pw.setReuseDeltaCommits(false);
-    if (tagTargets != null)
+    if (tagTargets != null) {
         pw.setTagTargets(tagTargets);
+    }
     if (excludeObjects != null)
         for (ObjectIdSet idx : excludeObjects)
             pw.excludeObjects(idx);
-    pw.preparePack(pm, want, have);
+    pw.preparePack(pm, want, have, PackWriter.NONE, tags);
     if (pw.getObjectCount() == 0)
         return null;
     checkCancelled();

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java | 96

@@ -233,7 +233,9 @@ public class PackWriter implements AutoCloseable {
 private List<CachedPack> cachedPacks = new ArrayList<>(2);

-private Set<ObjectId> tagTargets = Collections.emptySet();
+private Set<ObjectId> tagTargets = NONE;
+
+private Set<? extends ObjectId> excludeFromBitmapSelection = NONE;

 private ObjectIdSet[] excludeInPacks;
@@ -712,8 +714,7 @@
 public void preparePack(ProgressMonitor countingMonitor,
         @NonNull Set<? extends ObjectId> want,
         @NonNull Set<? extends ObjectId> have) throws IOException {
-    preparePack(countingMonitor,
-            want, have, Collections.<ObjectId> emptySet());
+    preparePack(countingMonitor, want, have, NONE, NONE);
 }

 /**
@@ -721,9 +722,9 @@
  * <p>
  * Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
  * specifying commits that should not be walked past ("shallow" commits).
- * The caller is responsible for filtering out commits that should not
- * be shallow any more ("unshallow" commits as in {@link #setShallowPack})
- * from the shallow set.
+ * The caller is responsible for filtering out commits that should not be
+ * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+ * the shallow set.
  *
  * @param countingMonitor
  *            progress during object enumeration.
@@ -731,27 +732,67 @@
  *            objects of interest, ancestors of which will be included in
  *            the pack. Must not be {@code null}.
  * @param have
- *            objects whose ancestors (up to and including
- *            {@code shallow} commits) do not need to be included in the
- *            pack because they are already available from elsewhere.
- *            Must not be {@code null}.
+ *            objects whose ancestors (up to and including {@code shallow}
+ *            commits) do not need to be included in the pack because they
+ *            are already available from elsewhere. Must not be
+ *            {@code null}.
  * @param shallow
- *            commits indicating the boundary of the history marked with
- *            {@code have}. Shallow commits have parents but those
- *            parents are considered not to be already available.
- *            Parents of {@code shallow} commits and earlier generations
- *            will be included in the pack if requested by {@code want}.
- *            Must not be {@code null}.
+ *            commits indicating the boundary of the history marked with
+ *            {@code have}. Shallow commits have parents but those parents
+ *            are considered not to be already available. Parents of
+ *            {@code shallow} commits and earlier generations will be
+ *            included in the pack if requested by {@code want}. Must not be
+ *            {@code null}.
  * @throws IOException
- *             an I/O problem occured while reading objects.
+ *             an I/O problem occurred while reading objects.
  */
 public void preparePack(ProgressMonitor countingMonitor,
         @NonNull Set<? extends ObjectId> want,
         @NonNull Set<? extends ObjectId> have,
         @NonNull Set<? extends ObjectId> shallow) throws IOException {
+    preparePack(countingMonitor, want, have, shallow, NONE);
+}
+
+/**
+ * Prepare the list of objects to be written to the pack stream.
+ * <p>
+ * Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
+ * specifying commits that should not be walked past ("shallow" commits).
+ * The caller is responsible for filtering out commits that should not be
+ * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+ * the shallow set.
+ *
+ * @param countingMonitor
+ *            progress during object enumeration.
+ * @param want
+ *            objects of interest, ancestors of which will be included in
+ *            the pack. Must not be {@code null}.
+ * @param have
+ *            objects whose ancestors (up to and including {@code shallow}
+ *            commits) do not need to be included in the pack because they
+ *            are already available from elsewhere. Must not be
+ *            {@code null}.
+ * @param shallow
+ *            commits indicating the boundary of the history marked with
+ *            {@code have}. Shallow commits have parents but those parents
+ *            are considered not to be already available. Parents of
+ *            {@code shallow} commits and earlier generations will be
+ *            included in the pack if requested by {@code want}. Must not be
+ *            {@code null}.
+ * @param noBitmaps
+ *            collection of objects to be excluded from bitmap commit
+ *            selection.
+ * @throws IOException
+ *             an I/O problem occurred while reading objects.
+ */
+public void preparePack(ProgressMonitor countingMonitor,
+        @NonNull Set<? extends ObjectId> want,
+        @NonNull Set<? extends ObjectId> have,
+        @NonNull Set<? extends ObjectId> shallow,
+        @NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
     try (ObjectWalk ow = getObjectWalk()) {
         ow.assumeShallow(shallow);
-        preparePack(countingMonitor, ow, want, have);
+        preparePack(countingMonitor, ow, want, have, noBitmaps);
     }
 }
@@ -784,13 +825,17 @@
  *            points of graph traversal). Pass {@link #NONE} if all objects
  *            reachable from {@code want} are desired, such as when serving
  *            a clone.
+ * @param noBitmaps
+ *            collection of objects to be excluded from bitmap commit
+ *            selection.
  * @throws IOException
  *             when some I/O problem occur during reading objects.
  */
 public void preparePack(ProgressMonitor countingMonitor,
         @NonNull ObjectWalk walk,
         @NonNull Set<? extends ObjectId> interestingObjects,
-        @NonNull Set<? extends ObjectId> uninterestingObjects)
+        @NonNull Set<? extends ObjectId> uninterestingObjects,
+        @NonNull Set<? extends ObjectId> noBitmaps)
         throws IOException {
     if (countingMonitor == null)
         countingMonitor = NullProgressMonitor.INSTANCE;
@@ -798,7 +843,7 @@
         throw new IllegalArgumentException(
                 JGitText.get().shallowPacksRequireDepthWalk);
     findObjectsToPack(countingMonitor, walk, interestingObjects,
-            uninterestingObjects);
+            uninterestingObjects, noBitmaps);
 }

 /**
@@ -965,8 +1010,9 @@
 /**
  * Write the prepared pack to the supplied stream.
  * <p>
- * Called after {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set)}
- * or {@link #preparePack(ProgressMonitor, Set, Set)}.
+ * Called after
+ * {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set, Set)} or
+ * {@link #preparePack(ProgressMonitor, Set, Set)}.
  * <p>
  * Performs delta search if enabled and writes the pack stream.
  * <p>
@@ -1652,12 +1698,14 @@
 private void findObjectsToPack(@NonNull ProgressMonitor countingMonitor,
         @NonNull ObjectWalk walker, @NonNull Set<? extends ObjectId> want,
-        @NonNull Set<? extends ObjectId> have) throws IOException {
+        @NonNull Set<? extends ObjectId> have,
+        @NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
     final long countingStart = System.currentTimeMillis();
     beginPhase(PackingPhase.COUNTING, countingMonitor, ProgressMonitor.UNKNOWN);

     stats.interestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(want));
     stats.uninterestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(have));
+    excludeFromBitmapSelection = noBitmaps;

     canBuildBitmaps = config.isBuildBitmaps()
             && !shallowPack
@@ -2070,8 +2118,8 @@
 PackWriterBitmapPreparer bitmapPreparer = new PackWriterBitmapPreparer(
         reader, writeBitmaps, pm, stats.interestingObjects, config);

-Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits =
-        bitmapPreparer.selectCommits(numCommits);
+Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits = bitmapPreparer
+        .selectCommits(numCommits, excludeFromBitmapSelection);

 beginPhase(PackingPhase.BUILDING_BITMAPS, pm, selectedCommits.size());

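A sketch of the new five-argument overload from a caller's perspective, mirroring how GC and DfsGarbageCollector use it in this commit (`repo`, `want`, `have`, and `tags` are assumed to exist):

    try (PackWriter pw = new PackWriter(new PackConfig(repo),
            repo.newObjectReader())) {
        // Passing the tag set as noBitmaps keeps tag-only commits out of
        // bitmap commit selection without dropping them from the pack.
        pw.preparePack(NullProgressMonitor.INSTANCE, want, have,
                PackWriter.NONE /* shallow */, tags /* noBitmaps */);
        // ... write the pack ...
    }
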
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java | 18

@@ -141,6 +141,8 @@ class PackWriterBitmapPreparer {
  *
  * @param expectedCommitCount
  *            count of commits in the pack
+ * @param excludeFromBitmapSelection
+ *            commits that should be excluded from bitmap selection
  * @return commit objects for which bitmap indices should be built
  * @throws IncorrectObjectTypeException
  *             if any of the processed objects is not a commit
@@ -149,7 +151,8 @@
  * @throws MissingObjectException
  *             if an expected object is missing
  */
-Collection<BitmapCommit> selectCommits(int expectedCommitCount)
+Collection<BitmapCommit> selectCommits(int expectedCommitCount,
+        Set<? extends ObjectId> excludeFromBitmapSelection)
         throws IncorrectObjectTypeException, IOException,
         MissingObjectException {
     /*
@@ -164,7 +167,7 @@
     RevWalk rw = new RevWalk(reader);
     rw.setRetainBody(false);
     CommitSelectionHelper selectionHelper = setupTipCommitBitmaps(rw,
-            expectedCommitCount);
+            expectedCommitCount, excludeFromBitmapSelection);
     pm.endTask();

     int totCommits = selectionHelper.getCommitCount();
@@ -363,6 +366,8 @@
  * @param expectedCommitCount
  *            expected count of commits. The actual count may be less due to
  *            unreachable garbage.
+ * @param excludeFromBitmapSelection
+ *            commits that should be excluded from bitmap selection
  * @return a {@link CommitSelectionHelper} containing bitmaps for the tip
  *         commits
  * @throws IncorrectObjectTypeException
@@ -373,8 +378,10 @@
  *             if an expected object is missing
  */
 private CommitSelectionHelper setupTipCommitBitmaps(RevWalk rw,
-        int expectedCommitCount) throws IncorrectObjectTypeException,
-        IOException, MissingObjectException {
+        int expectedCommitCount,
+        Set<? extends ObjectId> excludeFromBitmapSelection)
+        throws IncorrectObjectTypeException, IOException,
+        MissingObjectException {
     BitmapBuilder reuse = commitBitmapIndex.newBitmapBuilder();
     List<BitmapCommit> reuseCommits = new ArrayList<>();
     for (PackBitmapIndexRemapper.Entry entry : bitmapRemapper) {
@@ -403,7 +410,8 @@
     Set<RevCommit> peeledWant = new HashSet<>(want.size());
     for (AnyObjectId objectId : want) {
         RevObject ro = rw.peel(rw.parseAny(objectId));
-        if (!(ro instanceof RevCommit) || reuse.contains(ro)) {
+        if (!(ro instanceof RevCommit) || reuse.contains(ro)
+                || excludeFromBitmapSelection.contains(ro)) {
             continue;
         }

org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java | 2

@@ -1523,7 +1523,7 @@ public class UploadPack {
     walk.reset();

     ObjectWalk ow = rw.toObjectWalkWithSameObjects();
-    pw.preparePack(pm, ow, wantAll, commonBase);
+    pw.preparePack(pm, ow, wantAll, commonBase, PackWriter.NONE);
     rw = ow;
 }

org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java | 8

@@ -497,7 +497,13 @@ public abstract class FS {
 if (env != null) {
     pb.environment().putAll(env);
 }
-Process p = pb.start();
+Process p;
+try {
+    p = pb.start();
+} catch (IOException e) {
+    // Process failed to start
+    throw new CommandFailedException(-1, e.getMessage(), e);
+}
 p.getOutputStream().close();
 GobblerThread gobbler = new GobblerThread(p, command, dir);
 gobbler.start();

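With this change a launch failure (for example, a missing binary) surfaces as a CommandFailedException instead of readPipe() silently returning null. A sketch from a caller's perspective (readPipe is not public, so this assumes code in the org.eclipse.jgit.util package, like the new test above):

    try {
        String out = FS.readPipe(fs.userHome(),
                new String[] { "git", "--version" },
                Charset.defaultCharset().name());
        System.out.println(out);
    } catch (CommandFailedException e) {
        // thrown with exit code -1 when the process could not be started
        System.err.println("command failed: " + e.getMessage());
    }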