
dfs: Switch InMemoryRepository to DfsReftableDatabase

This ensures DfsReftableDatabase is tested by the same test suites that
use/test InMemoryRepository. It also simplifies the logic of
InMemoryRepository and brings its compatibility story closer to any
other DFS repository that uses reftables for its reference storage.

Change-Id: I881469fd77ed11a9239b477633510b8c482a19ca
Signed-off-by: Minh Thai <mthai@google.com>
Signed-off-by: Terry Parker <tparker@google.com>
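
The practical effect for tests is that any suite built on InMemoryRepository now
exercises reftable-backed ref storage without code changes. A minimal sketch, not
part of this patch (the wrapper class name is illustrative; the calls are the
standard InMemoryRepository API):

import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
import org.eclipse.jgit.lib.RefDatabase;

public class InMemoryReftableSketch {
	public static void main(String[] args) throws Exception {
		// Same construction tests have always used.
		InMemoryRepository repo = new InMemoryRepository(
				new DfsRepositoryDescription("test"));
		// After this change the returned database is MemRefDatabase, a
		// DfsReftableDatabase subclass, so refs round-trip through reftables
		// held in the in-memory object database.
		RefDatabase refdb = repo.getRefDatabase();
		System.out.println(refdb.getClass().getName());
		repo.close();
	}
}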
Branch: stable-4.10
Author: Shawn Pearce, 7 years ago; committed by Terry Parker
Commit: 7cd5d77ae3
Changed files (7):

1. org.eclipse.jgit.http.test/META-INF/MANIFEST.MF (1 line changed)
2. org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/RefsUnreadableInMemoryRepository.java (7 lines changed)
3. org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java (9 lines changed)
4. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java (3 lines changed)
5. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java (6 lines changed)
6. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java (238 lines changed)
7. org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java (2 lines changed)

org.eclipse.jgit.http.test/META-INF/MANIFEST.MF (1 line changed)

@@ -31,6 +31,7 @@ Import-Package: javax.servlet;version="[2.5.0,3.2.0)",
 org.eclipse.jgit.internal;version="[4.10.0,4.11.0)",
 org.eclipse.jgit.internal.storage.dfs;version="[4.10.0,4.11.0)",
 org.eclipse.jgit.internal.storage.file;version="[4.10.0,4.11.0)",
+org.eclipse.jgit.internal.storage.reftable;version="[4.9.0,4.11.0)",
 org.eclipse.jgit.junit;version="[4.10.0,4.11.0)",
 org.eclipse.jgit.junit.http;version="[4.10.0,4.11.0)",
 org.eclipse.jgit.lib;version="[4.10.0,4.11.0)",

org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/RefsUnreadableInMemoryRepository.java (7 lines changed)

@@ -46,6 +46,7 @@ import java.io.IOException;
 import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
 import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
+import org.eclipse.jgit.internal.storage.reftable.Reftable;
 import org.eclipse.jgit.lib.RefDatabase;
 
 /**
@@ -80,14 +81,12 @@ class RefsUnreadableInMemoryRepository extends InMemoryRepository {
 	}
 
 	private class RefsUnreadableRefDatabase extends MemRefDatabase {
 		@Override
-		protected RefCache scanAllRefs() throws IOException {
+		protected Reftable reader() throws IOException {
 			if (failing) {
 				throw new IOException("disk failed, no refs found");
-			} else {
-				return super.scanAllRefs();
 			}
+			return super.reader();
 		}
 	}
 }

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java (9 lines changed)

@@ -14,7 +14,6 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.concurrent.TimeUnit;
@@ -722,7 +721,7 @@ public class DfsGarbageCollectorTest {
 		DfsPackDescription t1 = odb.newPack(INSERT);
 		try (DfsOutputStream out = odb.writeFile(t1, REFTABLE)) {
-			out.write("ignored".getBytes(StandardCharsets.UTF_8));
+			new ReftableWriter().begin(out).finish();
 			t1.addFileExt(REFTABLE);
 		}
 		odb.commitPack(Collections.singleton(t1), null);
@@ -739,9 +738,9 @@ public class DfsGarbageCollectorTest {
 		assertTrue("commit0 in pack", isObjectInPack(commit0, pack));
 		assertTrue("commit1 in pack", isObjectInPack(commit1, pack));
 
-		// Only INSERT REFTABLE above is present.
+		// A GC and the older INSERT REFTABLE above is present.
 		DfsReftable[] tables = odb.getReftables();
-		assertEquals(1, tables.length);
+		assertEquals(2, tables.length);
 		assertEquals(t1, tables[0].getPackDescription());
 	}
@@ -754,7 +753,7 @@ public class DfsGarbageCollectorTest {
 		DfsPackDescription t1 = odb.newPack(INSERT);
 		try (DfsOutputStream out = odb.writeFile(t1, REFTABLE)) {
-			out.write("ignored".getBytes(StandardCharsets.UTF_8));
+			new ReftableWriter().begin(out).finish();
 			t1.addFileExt(REFTABLE);
 		}
 		odb.commitPack(Collections.singleton(t1), null);

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java (3 lines changed)

@@ -446,8 +446,9 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
 		// add, as the pack was already committed via commitPack().
 		// If this is the case return without changing the list.
 		for (DfsPackFile p : o.packs) {
-			if (p == newPack)
+			if (p.key.equals(newPack.key)) {
 				return;
+			}
 		}
 	}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java (6 lines changed)

@@ -53,6 +53,7 @@ import org.eclipse.jgit.internal.storage.pack.PackExt;
 import org.eclipse.jgit.internal.storage.reftable.MergedReftable;
 import org.eclipse.jgit.internal.storage.reftable.RefCursor;
 import org.eclipse.jgit.internal.storage.reftable.Reftable;
+import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
 import org.eclipse.jgit.lib.BatchRefUpdate;
 import org.eclipse.jgit.lib.NullProgressMonitor;
 import org.eclipse.jgit.lib.ObjectId;
@@ -102,6 +103,11 @@ public class DfsReftableDatabase extends DfsRefDatabase {
 		return new ReftableBatchRefUpdate(this, odb);
 	}
 
+	/** @return configuration to write new reftables with. */
+	public ReftableConfig getReftableConfig() {
+		return new ReftableConfig(getRepository().getConfig());
+	}
+
 	/** @return the lock protecting this instance's state. */
 	protected ReentrantLock getLock() {
 		return lock;
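
getReftableConfig() gives subclasses a single hook for tuning how their reftables
are written; ReftableBatchRefUpdate now asks the ref database for this config
instead of building one itself (see the last file below). A minimal sketch of such
an override, mirroring what MemRefDatabase does later in this patch (the subclass
name is illustrative):

class TunedRefDatabase extends DfsReftableDatabase {
	TunedRefDatabase(DfsRepository repo) {
		super(repo);
	}

	@Override
	public ReftableConfig getReftableConfig() {
		ReftableConfig cfg = new ReftableConfig();
		cfg.setAlignBlocks(false);   // skip block alignment for small, in-memory tables
		cfg.setIndexObjects(false);  // object indexes are unnecessary for test-sized repos
		cfg.fromConfig(getRepository().getConfig());
		return cfg;
	}
}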

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java (238 lines changed)

@@ -6,30 +6,13 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.eclipse.jgit.annotations.Nullable;
 import org.eclipse.jgit.internal.storage.pack.PackExt;
-import org.eclipse.jgit.lib.BatchRefUpdate;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectIdRef;
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.lib.Ref;
-import org.eclipse.jgit.lib.Ref.Storage;
+import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
 import org.eclipse.jgit.lib.RefDatabase;
-import org.eclipse.jgit.revwalk.RevObject;
-import org.eclipse.jgit.revwalk.RevTag;
-import org.eclipse.jgit.revwalk.RevWalk;
-import org.eclipse.jgit.transport.ReceiveCommand;
-import org.eclipse.jgit.util.RefList;
 
 /**
  * Git repository stored entirely in the local process memory.
@@ -54,9 +37,8 @@ public class InMemoryRepository extends DfsRepository {
 	static final AtomicInteger packId = new AtomicInteger();
 
 	private final MemObjDatabase objdb;
-	private final RefDatabase refdb;
+	private final MemRefDatabase refdb;
 	private String gitwebDescription;
-	private boolean performsAtomicTransactions = true;
 
 	/**
 	 * Initialize a new in-memory repository.
@ -92,7 +74,7 @@ public class InMemoryRepository extends DfsRepository {
* @param atomic * @param atomic
*/ */
public void setPerformsAtomicTransactions(boolean atomic) { public void setPerformsAtomicTransactions(boolean atomic) {
performsAtomicTransactions = atomic; refdb.performsAtomicTransactions = atomic;
} }
@Override @Override
@@ -148,6 +130,7 @@ public class InMemoryRepository extends DfsRepository {
 			if (replace != null)
 				n.removeAll(replace);
 			packs = n;
+			clearCache();
 		}
 
 		@Override
@@ -159,37 +142,43 @@ public class InMemoryRepository extends DfsRepository {
 		protected ReadableChannel openFile(DfsPackDescription desc, PackExt ext)
 				throws FileNotFoundException, IOException {
 			MemPack memPack = (MemPack) desc;
-			byte[] file = memPack.fileMap.get(ext);
+			byte[] file = memPack.get(ext);
 			if (file == null)
 				throw new FileNotFoundException(desc.getFileName(ext));
 			return new ByteArrayReadableChannel(file, blockSize);
 		}
 
 		@Override
-		protected DfsOutputStream writeFile(
-				DfsPackDescription desc, final PackExt ext) throws IOException {
-			final MemPack memPack = (MemPack) desc;
+		protected DfsOutputStream writeFile(DfsPackDescription desc,
+				PackExt ext) throws IOException {
+			MemPack memPack = (MemPack) desc;
 			return new Out() {
 				@Override
 				public void flush() {
-					memPack.fileMap.put(ext, getData());
+					memPack.put(ext, getData());
 				}
 			};
 		}
 	}
 
 	private static class MemPack extends DfsPackDescription {
-		final Map<PackExt, byte[]>
-			fileMap = new HashMap<>();
+		final byte[][] fileMap = new byte[PackExt.values().length][];
 
 		MemPack(String name, DfsRepositoryDescription repoDesc) {
 			super(repoDesc, name);
 		}
+
+		void put(PackExt ext, byte[] data) {
+			fileMap[ext.getPosition()] = data;
+		}
+
+		byte[] get(PackExt ext) {
+			return fileMap[ext.getPosition()];
+		}
 	}
 
 	private abstract static class Out extends DfsOutputStream {
 		private final ByteArrayOutputStream dst = new ByteArrayOutputStream();
 		private byte[] data;
 
 		@Override
@@ -221,7 +210,6 @@ public class InMemoryRepository extends DfsRepository {
 		public void close() {
 			flush();
 		}
-
 	}
 
 	private static class ByteArrayReadableChannel implements ReadableChannel {
@@ -281,193 +269,27 @@ public class InMemoryRepository extends DfsRepository {
 		}
 	}
 
-	/**
-	 * A ref database storing all refs in-memory.
-	 * <p>
-	 * This class is protected (and not private) to facilitate testing using
-	 * subclasses of InMemoryRepository.
-	 */
-	protected class MemRefDatabase extends DfsRefDatabase {
-		private final ConcurrentMap<String, Ref> refs = new ConcurrentHashMap<>();
-		private final ReadWriteLock lock = new ReentrantReadWriteLock(true /* fair */);
+	/** DfsRefDatabase used by InMemoryRepository. */
+	protected class MemRefDatabase extends DfsReftableDatabase {
+		boolean performsAtomicTransactions = true;
 
-		/**
-		 * Initialize a new in-memory ref database.
-		 */
+		/** Initialize a new in-memory ref database. */
 		protected MemRefDatabase() {
 			super(InMemoryRepository.this);
 		}
 
 		@Override
-		public boolean performsAtomicTransactions() {
-			return performsAtomicTransactions;
-		}
-
-		@Override
-		public BatchRefUpdate newBatchUpdate() {
-			return new BatchRefUpdate(this) {
-				@Override
-				public void execute(RevWalk walk, ProgressMonitor monitor)
-						throws IOException {
-					if (performsAtomicTransactions() && isAtomic()) {
-						try {
-							lock.writeLock().lock();
-							batch(getCommands());
-						} finally {
-							lock.writeLock().unlock();
-						}
-					} else {
-						super.execute(walk, monitor);
-					}
-				}
-			};
-		}
-
-		@Override
-		protected RefCache scanAllRefs() throws IOException {
-			RefList.Builder<Ref> ids = new RefList.Builder<>();
-			RefList.Builder<Ref> sym = new RefList.Builder<>();
-			try {
-				lock.readLock().lock();
-				for (Ref ref : refs.values()) {
-					if (ref.isSymbolic())
-						sym.add(ref);
-					ids.add(ref);
-				}
-			} finally {
-				lock.readLock().unlock();
-			}
-			ids.sort();
-			sym.sort();
-			objdb.getCurrentPackList().markDirty();
-			return new RefCache(ids.toRefList(), sym.toRefList());
-		}
-
-		private void batch(List<ReceiveCommand> cmds) {
-			// Validate that the target exists in a new RevWalk, as the RevWalk
-			// from the RefUpdate might be reading back unflushed objects.
-			Map<ObjectId, ObjectId> peeled = new HashMap<>();
-			try (RevWalk rw = new RevWalk(getRepository())) {
-				for (ReceiveCommand c : cmds) {
-					if (c.getResult() != ReceiveCommand.Result.NOT_ATTEMPTED) {
-						ReceiveCommand.abort(cmds);
-						return;
-					}
-					if (!ObjectId.zeroId().equals(c.getNewId())) {
-						try {
-							RevObject o = rw.parseAny(c.getNewId());
-							if (o instanceof RevTag) {
-								peeled.put(o, rw.peel(o).copy());
-							}
-						} catch (IOException e) {
-							c.setResult(ReceiveCommand.Result.REJECTED_MISSING_OBJECT);
-							ReceiveCommand.abort(cmds);
-							return;
-						}
-					}
-				}
-			}
-
-			// Check all references conform to expected old value.
-			for (ReceiveCommand c : cmds) {
-				Ref r = refs.get(c.getRefName());
-				if (r == null) {
-					if (c.getType() != ReceiveCommand.Type.CREATE) {
-						c.setResult(ReceiveCommand.Result.LOCK_FAILURE);
-						ReceiveCommand.abort(cmds);
-						return;
-					}
-				} else {
-					ObjectId objectId = r.getObjectId();
-					if (r.isSymbolic() || objectId == null
-							|| !objectId.equals(c.getOldId())) {
-						c.setResult(ReceiveCommand.Result.LOCK_FAILURE);
-						ReceiveCommand.abort(cmds);
-						return;
-					}
-				}
-			}
-
-			// Write references.
-			for (ReceiveCommand c : cmds) {
-				if (c.getType() == ReceiveCommand.Type.DELETE) {
-					refs.remove(c.getRefName());
-					c.setResult(ReceiveCommand.Result.OK);
-					continue;
-				}
-
-				ObjectId p = peeled.get(c.getNewId());
-				Ref r;
-				if (p != null) {
-					r = new ObjectIdRef.PeeledTag(Storage.PACKED,
-							c.getRefName(), c.getNewId(), p);
-				} else {
-					r = new ObjectIdRef.PeeledNonTag(Storage.PACKED,
-							c.getRefName(), c.getNewId());
-				}
-				refs.put(r.getName(), r);
-				c.setResult(ReceiveCommand.Result.OK);
-			}
-			clearCache();
-		}
-
-		@Override
-		protected boolean compareAndPut(Ref oldRef, Ref newRef)
-				throws IOException {
-			try {
-				lock.writeLock().lock();
-				ObjectId id = newRef.getObjectId();
-				if (id != null) {
-					try (RevWalk rw = new RevWalk(getRepository())) {
-						// Validate that the target exists in a new RevWalk, as the RevWalk
-						// from the RefUpdate might be reading back unflushed objects.
-						rw.parseAny(id);
-					}
-				}
-				String name = newRef.getName();
-				if (oldRef == null)
-					return refs.putIfAbsent(name, newRef) == null;
-				Ref cur = refs.get(name);
-				if (cur != null) {
-					if (eq(cur, oldRef))
-						return refs.replace(name, cur, newRef);
-				}
-				if (oldRef.getStorage() == Storage.NEW)
-					return refs.putIfAbsent(name, newRef) == null;
-				return false;
-			} finally {
-				lock.writeLock().unlock();
-			}
+		public ReftableConfig getReftableConfig() {
+			ReftableConfig cfg = new ReftableConfig();
+			cfg.setAlignBlocks(false);
+			cfg.setIndexObjects(false);
+			cfg.fromConfig(getRepository().getConfig());
+			return cfg;
 		}
 
 		@Override
-		protected boolean compareAndRemove(Ref oldRef) throws IOException {
-			try {
-				lock.writeLock().lock();
-				String name = oldRef.getName();
-				Ref cur = refs.get(name);
-				if (cur != null && eq(cur, oldRef))
-					return refs.remove(name, cur);
-				else
-					return false;
-			} finally {
-				lock.writeLock().unlock();
-			}
-		}
-
-		private boolean eq(Ref a, Ref b) {
-			if (!Objects.equals(a.getName(), b.getName()))
-				return false;
-			if (a.isSymbolic() != b.isSymbolic())
-				return false;
-			if (a.isSymbolic())
-				return Objects.equals(a.getTarget().getName(), b.getTarget().getName());
-			else
-				return Objects.equals(a.getObjectId(), b.getObjectId());
+		public boolean performsAtomicTransactions() {
+			return performsAtomicTransactions;
 		}
 	}
 }
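
Tests that toggle atomic ref-transaction support keep the same entry point; only
the backing flag moved from InMemoryRepository into MemRefDatabase. A minimal
sketch (illustrative, not part of the patch):

InMemoryRepository repo = new InMemoryRepository(
		new DfsRepositoryDescription("test"));
// Disable atomic batch ref updates, e.g. to exercise non-atomic code paths.
repo.setPerformsAtomicTransactions(false);
// The ref database (now a MemRefDatabase, a DfsReftableDatabase) reports the flag.
assert !repo.getRefDatabase().performsAtomicTransactions();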

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java (2 lines changed)

@@ -116,7 +116,7 @@ public class ReftableBatchRefUpdate extends BatchRefUpdate {
 		this.refdb = refdb;
 		this.odb = odb;
 		lock = refdb.getLock();
-		reftableConfig = new ReftableConfig(refdb.getRepository().getConfig());
+		reftableConfig = refdb.getReftableConfig();
 	}
 
 	@Override