
Always send refs' objects despite "filter" in pack

In a0c9016abd ("upload-pack: send refs' objects despite "filter"",
2018-07-09), Git updated the "filter" option in the fetch-pack/upload-pack
protocol to not filter objects explicitly specified in
"want" lines, even if they match the criterion of the filter. Update
JGit to match that behavior.

Change-Id: Ia4d74326edb89e61062e397e05483298c50f9232
Signed-off-by: Jonathan Tan <jonathantanmy@google.com>
Branch: stable-5.1
Jonathan Tan authored 6 years ago; committed by Jonathan Nieder
Parent commit: 32798dcfdb

Changed files:
  1. org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/UploadPackTest.java (79 changed lines)
  2. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java (10 changed lines)
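
As context for the diff below, the client-visible effect can be sketched with JGit's Transport API, mirroring the new UploadPackTest cases: with a blob filter of limit 0 in force, blobs reachable only through the fetched commit are omitted, but a blob named directly in a fetch RefSpec is still delivered. This is only an illustrative sketch; the repository path, remote name, and object id are placeholders, not part of the change.

// Client-side sketch (assumed paths and names); only the combination of a
// blob filter with an explicitly wanted blob is the point. Mirrors
// UploadPackTest#testFetchExplicitBlobWithFilter.
import java.io.File;
import java.util.Arrays;

import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.transport.RefSpec;
import org.eclipse.jgit.transport.Transport;

public class FilteredFetchSketch {
    public static void main(String[] args) throws Exception {
        Repository repo = new FileRepositoryBuilder()
                .setGitDir(new File("/tmp/clone/.git")) // assumed local clone
                .build();
        try (Transport tn = Transport.open(repo, "origin")) {
            // Ask the server to omit all blobs from the pack...
            tn.setFilterBlobLimit(0);
            // ...but a blob requested explicitly by id (a placeholder 40-hex
            // id here, e.g. one advertised by a ref) is still sent after this
            // change, alongside the commit and its trees.
            tn.fetch(NullProgressMonitor.INSTANCE, Arrays.asList(
                    new RefSpec("refs/heads/master"),
                    new RefSpec("0123456789abcdef0123456789abcdef01234567")));
        }
    }
}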

org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/UploadPackTest.java

@@ -226,6 +226,44 @@ public class UploadPackTest {
         }
     }
 
+    @Test
+    public void testFetchExplicitBlobWithFilter() throws Exception {
+        InMemoryRepository server2 = newRepo("server2");
+        TestRepository<InMemoryRepository> remote2 =
+                new TestRepository<>(server2);
+        RevBlob blob1 = remote2.blob("foobar");
+        RevBlob blob2 = remote2.blob("fooba");
+        RevTree tree = remote2.tree(remote2.file("1", blob1),
+                remote2.file("2", blob2));
+        RevCommit commit = remote2.commit(tree);
+        remote2.update("master", commit);
+        remote2.update("a_blob", blob1);
+
+        server2.getConfig().setBoolean("uploadpack", null, "allowfilter", true);
+
+        testProtocol = new TestProtocol<>(
+                new UploadPackFactory<Object>() {
+                    @Override
+                    public UploadPack create(Object req, Repository db)
+                            throws ServiceNotEnabledException,
+                            ServiceNotAuthorizedException {
+                        UploadPack up = new UploadPack(db);
+                        return up;
+                    }
+                }, null);
+        uri = testProtocol.register(ctx, server2);
+
+        try (Transport tn = testProtocol.open(uri, client, "server2")) {
+            tn.setFilterBlobLimit(0);
+            tn.fetch(NullProgressMonitor.INSTANCE, Arrays.asList(
+                    new RefSpec(commit.name()),
+                    new RefSpec(blob1.name())));
+            assertTrue(client.hasObject(tree.toObjectId()));
+            assertTrue(client.hasObject(blob1.toObjectId()));
+            assertFalse(client.hasObject(blob2.toObjectId()));
+        }
+    }
+
     @Test
     public void testFetchWithBlobLimitFilter() throws Exception {
         InMemoryRepository server2 = newRepo("server2");
@@ -261,6 +299,47 @@ public class UploadPackTest {
         }
     }
 
+    @Test
+    public void testFetchExplicitBlobWithFilterAndBitmaps() throws Exception {
+        InMemoryRepository server2 = newRepo("server2");
+        TestRepository<InMemoryRepository> remote2 =
+                new TestRepository<>(server2);
+        RevBlob blob1 = remote2.blob("foobar");
+        RevBlob blob2 = remote2.blob("fooba");
+        RevTree tree = remote2.tree(remote2.file("1", blob1),
+                remote2.file("2", blob2));
+        RevCommit commit = remote2.commit(tree);
+        remote2.update("master", commit);
+        remote2.update("a_blob", blob1);
+
+        server2.getConfig().setBoolean("uploadpack", null, "allowfilter", true);
+
+        // generate bitmaps
+        new DfsGarbageCollector(server2).pack(null);
+        server2.scanForRepoChanges();
+
+        testProtocol = new TestProtocol<>(
+                new UploadPackFactory<Object>() {
+                    @Override
+                    public UploadPack create(Object req, Repository db)
+                            throws ServiceNotEnabledException,
+                            ServiceNotAuthorizedException {
+                        UploadPack up = new UploadPack(db);
+                        return up;
+                    }
+                }, null);
+        uri = testProtocol.register(ctx, server2);
+
+        try (Transport tn = testProtocol.open(uri, client, "server2")) {
+            tn.setFilterBlobLimit(0);
+            tn.fetch(NullProgressMonitor.INSTANCE, Arrays.asList(
+                    new RefSpec(commit.name()),
+                    new RefSpec(blob1.name())));
+            assertTrue(client.hasObject(blob1.toObjectId()));
+            assertFalse(client.hasObject(blob2.toObjectId()));
+        }
+    }
+
     @Test
     public void testFetchWithBlobLimitFilterAndBitmaps() throws Exception {
         InMemoryRepository server2 = newRepo("server2");

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java

@@ -1970,7 +1970,7 @@ public class PackWriter implements AutoCloseable {
                 byte[] pathBuf = walker.getPathBuffer();
                 int pathLen = walker.getPathLength();
                 bases.addBase(o.getType(), pathBuf, pathLen, pathHash);
-                filterAndAddObject(o, o.getType(), pathHash);
+                filterAndAddObject(o, o.getType(), pathHash, want);
                 countingMonitor.update(1);
             }
         } else {
@@ -1980,7 +1980,7 @@ public class PackWriter implements AutoCloseable {
                     continue;
                 if (exclude(o))
                     continue;
-                filterAndAddObject(o, o.getType(), walker.getPathHashCode());
+                filterAndAddObject(o, o.getType(), walker.getPathHashCode(), want);
                 countingMonitor.update(1);
             }
         }
@@ -2013,7 +2013,7 @@ public class PackWriter implements AutoCloseable {
                 needBitmap.remove(objectId);
                 continue;
             }
-            filterAndAddObject(objectId, obj.getType(), 0);
+            filterAndAddObject(objectId, obj.getType(), 0, want);
         }
 
         if (thin)
@@ -2075,12 +2075,14 @@ public class PackWriter implements AutoCloseable {
     // Adds the given object as an object to be packed, first performing
     // filtering on blobs at or exceeding a given size.
     private void filterAndAddObject(@NonNull AnyObjectId src, int type,
-            int pathHashCode) throws IOException {
+            int pathHashCode, @NonNull Set<? extends AnyObjectId> want)
+            throws IOException {
         // Check if this object needs to be rejected, doing the cheaper
         // checks first.
         boolean reject = filterBlobLimit >= 0 &&
                 type == OBJ_BLOB &&
+                !want.contains(src) &&
                 reader.getObjectSize(src, OBJ_BLOB) > filterBlobLimit;
         if (!reject) {
             addObject(src, type, pathHashCode);
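
Read in isolation, the new check means a blob is dropped from the pack only when a size filter is active, the blob is larger than the limit, and it was not directly specified as a "want". The following is a minimal standalone restatement of that predicate; the class name, parameter list, and the externally supplied object size are hypothetical and introduced here only for illustration.

import java.util.Set;

import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;

// Hypothetical helper restating the reject check from filterAndAddObject above.
final class BlobFilterSketch {
    private BlobFilterSketch() {
        // no instances; static utility for illustration only
    }

    static boolean reject(long filterBlobLimit, int type, AnyObjectId src,
            Set<? extends AnyObjectId> want, long objectSize) {
        return filterBlobLimit >= 0              // a blob-size filter was requested
                && type == Constants.OBJ_BLOB    // only blobs are ever filtered
                && !want.contains(src)           // explicitly wanted objects are exempt
                && objectSize > filterBlobLimit; // only oversize blobs are dropped
    }
}

With a limit of 0, as the new tests use, every non-empty blob is therefore filtered out unless it was requested explicitly.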
