Browse Source

Fix javadoc in org.eclipse.jgit dfs package

Change-Id: I1f5e3dc3ba34b323ee7244dbefee207ce19e6021
Signed-off-by: Matthias Sohn <matthias.sohn@sap.com>
stable-4.10
Matthias Sohn 7 years ago
parent
commit
a224b78675
  1. 16
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BeforeDfsPackIndexLoadedEvent.java
  2. 3
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BeforeDfsPackIndexLoadedListener.java
  3. 38
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
  4. 29
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
  5. 13
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsCachedPack.java
  6. 2
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsConfig.java
  7. 15
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java
  8. 80
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
  9. 17
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
  10. 60
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
  11. 4
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjectRepresentation.java
  12. 2
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjectToPack.java
  13. 4
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsOutputStream.java
  14. 24
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java
  15. 102
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackDescription.java
  16. 12
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
  17. 28
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java
  18. 7
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPacksChangedEvent.java
  19. 4
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPacksChangedListener.java
  20. 31
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
  21. 64
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java
  22. 28
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java
  23. 28
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRefDatabase.java
  24. 1
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRefRename.java
  25. 8
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRefUpdate.java
  26. 12
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftable.java
  27. 39
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java
  28. 19
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRepository.java
  29. 29
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRepositoryBuilder.java
  30. 17
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRepositoryDescription.java
  31. 11
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsStreamKey.java
  32. 10
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java
  33. 5
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java
  34. 5
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java
  35. 3
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackInputStream.java
  36. 19
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReadableChannel.java
  37. 6
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java
  38. 12
      org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableStack.java

16
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BeforeDfsPackIndexLoadedEvent.java

@@ -46,10 +46,10 @@ package org.eclipse.jgit.internal.storage.dfs;
import org.eclipse.jgit.events.RepositoryEvent;
/**
* Describes the {@link DfsPackFile} just before its index is loaded. Currently,
* DfsPackFile directly dispatches the event on
* {@link org.eclipse.jgit.lib.Repository#getGlobalListenerList}. Which means
* the call to {@link #getRepository} will always return null.
* Describes the {@link org.eclipse.jgit.internal.storage.dfs.DfsPackFile} just
* before its index is loaded. Currently, DfsPackFile directly dispatches the
* event on {@link org.eclipse.jgit.lib.Repository#getGlobalListenerList}. Which
* means the call to {@link #getRepository} will always return null.
*/
public class BeforeDfsPackIndexLoadedEvent
extends RepositoryEvent<BeforeDfsPackIndexLoadedListener> {
@@ -65,16 +65,22 @@ public class BeforeDfsPackIndexLoadedEvent
this.pack = pack;
}
/** @return the PackFile containing the index that will be loaded. */
/**
* Get the PackFile containing the index that will be loaded.
*
* @return the PackFile containing the index that will be loaded.
*/
public DfsPackFile getPackFile() {
return pack;
}
/** {@inheritDoc} */
@Override
public Class<BeforeDfsPackIndexLoadedListener> getListenerType() {
return BeforeDfsPackIndexLoadedListener.class;
}
/** {@inheritDoc} */
@Override
public void dispatch(BeforeDfsPackIndexLoadedListener listener) {
listener.onBeforeDfsPackIndexLoaded(this);

3
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BeforeDfsPackIndexLoadedListener.java

@@ -46,7 +46,8 @@ package org.eclipse.jgit.internal.storage.dfs;
import org.eclipse.jgit.events.RepositoryListener;
/**
* Receives {@link BeforeDfsPackIndexLoadedEvent}s.
* Receives
* {@link org.eclipse.jgit.internal.storage.dfs.BeforeDfsPackIndexLoadedEvent}s.
*/
public interface BeforeDfsPackIndexLoadedListener extends RepositoryListener {
/**

38
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java

@@ -56,7 +56,9 @@ import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.internal.storage.pack.PackExt;
/**
* Caches slices of a {@link BlockBasedFile} in memory for faster read access.
* Caches slices of a
* {@link org.eclipse.jgit.internal.storage.dfs.BlockBasedFile} in memory for
* faster read access.
* <p>
* The DfsBlockCache serves as a Java based "buffer cache", loading segments of
* a BlockBasedFile into the JVM heap prior to use. As JGit often wants to do
@@ -103,7 +105,7 @@ public final class DfsBlockCache {
*
* @param cfg
* the new window cache configuration.
* @throws IllegalArgumentException
* @throws java.lang.IllegalArgumentException
* the cache configuration contains one or more invalid
* settings, usually too low of a limit.
*/
@ -111,7 +113,11 @@ public final class DfsBlockCache {
cache = new DfsBlockCache(cfg);
}
/** @return the currently active DfsBlockCache. */
/**
* Get the currently active DfsBlockCache.
*
* @return the currently active DfsBlockCache.
*/
public static DfsBlockCache getInstance() {
return cache;
}
@ -207,17 +213,27 @@ public final class DfsBlockCache {
return length <= maxStreamThroughCache;
}
/** @return total number of bytes in the cache, per pack file extension. */
/**
* Get total number of bytes in the cache, per pack file extension.
*
* @return total number of bytes in the cache, per pack file extension.
*/
public long[] getCurrentSize() {
return getStatVals(liveBytes);
}
/** @return 0..100, defining how full the cache is. */
/**
* Get 0..100, defining how full the cache is.
*
* @return 0..100, defining how full the cache is.
*/
public long getFillPercentage() {
return LongStream.of(getCurrentSize()).sum() * 100 / maxBytes;
}
/**
* Get number of requests for items in the cache, per pack file extension.
*
* @return number of requests for items in the cache, per pack file
* extension.
*/
@ -226,6 +242,9 @@ public final class DfsBlockCache {
}
/**
* Get number of requests for items not in the cache, per pack file
* extension.
*
* @return number of requests for items not in the cache, per pack file
* extension.
*/
@ -234,6 +253,8 @@ public final class DfsBlockCache {
}
/**
* Get total number of requests (hit + miss), per pack file extension.
*
* @return total number of requests (hit + miss), per pack file extension.
*/
public long[] getTotalRequestCount() {
@ -250,7 +271,9 @@ public final class DfsBlockCache {
}
/**
* @return 0..100, defining number of cache hits, per pack file extension.
* Get hit ratios
*
* @return hit ratios
*/
public long[] getHitRatio() {
AtomicLong[] hit = statHit.get();
@ -272,6 +295,9 @@ public final class DfsBlockCache {
}
/**
* Get number of evictions performed due to cache being full, per pack file
* extension.
*
* @return number of evictions performed due to cache being full, per pack
* file extension.
*/

29
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java

@@ -55,7 +55,10 @@ import java.text.MessageFormat;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Config;
/** Configuration parameters for {@link DfsBlockCache}. */
/**
* Configuration parameters for
* {@link org.eclipse.jgit.internal.storage.dfs.DfsBlockCache}.
*/
public class DfsBlockCacheConfig {
/** 1024 (number of bytes in one kibibyte/kilobyte) */
public static final int KB = 1024;
@@ -68,7 +71,9 @@ public class DfsBlockCacheConfig {
private double streamRatio;
private int concurrencyLevel;
/** Create a default configuration. */
/**
* Create a default configuration.
*/
public DfsBlockCacheConfig() {
setBlockLimit(32 * MB);
setBlockSize(64 * KB);
@ -77,6 +82,9 @@ public class DfsBlockCacheConfig {
}
/**
* Get maximum number bytes of heap memory to dedicate to caching pack file
* data.
*
* @return maximum number bytes of heap memory to dedicate to caching pack
* file data. <b>Default is 32 MB.</b>
*/
@ -85,6 +93,9 @@ public class DfsBlockCacheConfig {
}
/**
* Set maximum number bytes of heap memory to dedicate to caching pack file
* data.
*
* @param newLimit
* maximum number bytes of heap memory to dedicate to caching
* pack file data.
@ -96,6 +107,9 @@ public class DfsBlockCacheConfig {
}
/**
* Get size in bytes of a single window mapped or read in from the pack
* file.
*
* @return size in bytes of a single window mapped or read in from the pack
* file. <b>Default is 64 KB.</b>
*/
@ -104,6 +118,8 @@ public class DfsBlockCacheConfig {
}
/**
* Set size in bytes of a single window read in from the pack file.
*
* @param newSize
* size in bytes of a single window read in from the pack file.
* The value must be a power of 2.
@ -120,6 +136,8 @@ public class DfsBlockCacheConfig {
}
/**
* Get the estimated number of threads concurrently accessing the cache.
*
* @return the estimated number of threads concurrently accessing the cache.
* <b>Default is 32.</b>
*/
@ -128,6 +146,8 @@ public class DfsBlockCacheConfig {
}
/**
* Set the estimated number of threads concurrently accessing the cache.
*
* @param newConcurrencyLevel
* the estimated number of threads concurrently accessing the
* cache.
@ -140,6 +160,9 @@ public class DfsBlockCacheConfig {
}
/**
* Get highest percentage of {@link #getBlockLimit()} a single pack can
* occupy while being copied by the pack reuse strategy.
*
* @return highest percentage of {@link #getBlockLimit()} a single pack can
* occupy while being copied by the pack reuse strategy. <b>Default
* is 0.30, or 30%</b>.
@ -149,6 +172,8 @@ public class DfsBlockCacheConfig {
}
/**
* Set percentage of cache to occupy with a copied pack.
*
* @param ratio
* percentage of cache to occupy with a copied pack.
* @return {@code this}

13
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsCachedPack.java

@@ -50,7 +50,9 @@ import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.StoredObjectRepresentation;
/** A DfsPackFile available for reuse as-is. */
/**
* A DfsPackFile available for reuse as-is.
*/
public class DfsCachedPack extends CachedPack {
private final DfsPackFile pack;
@@ -58,21 +60,28 @@ public class DfsCachedPack extends CachedPack {
this.pack = pack;
}
/** @return the description of the pack. */
/**
* Get the description of the pack.
*
* @return the description of the pack.
*/
public DfsPackDescription getPackDescription() {
return pack.getPackDescription();
}
/** {@inheritDoc} */
@Override
public long getObjectCount() throws IOException {
return getPackDescription().getObjectCount();
}
/** {@inheritDoc} */
@Override
public long getDeltaCount() throws IOException {
return getPackDescription().getDeltaCount();
}
/** {@inheritDoc} */
@Override
public boolean hasObject(ObjectToPack obj, StoredObjectRepresentation rep) {
return ((DfsObjectRepresentation) rep).pack == pack;

2
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsConfig.java

@@ -49,11 +49,13 @@ import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.lib.StoredConfig;
final class DfsConfig extends StoredConfig {
/** {@inheritDoc} */
@Override
public void load() throws IOException, ConfigInvalidException {
clear();
}
/** {@inheritDoc} */
@Override
public void save() throws IOException {
// TODO actually store this configuration.

15
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java

@@ -65,7 +65,9 @@ import org.eclipse.jgit.revwalk.ObjectWalk;
import org.eclipse.jgit.revwalk.ObjectWalk;
import org.eclipse.jgit.revwalk.RevObject;
/** Verify the validity and connectivity of a DFS repository. */
/**
* Verify the validity and connectivity of a DFS repository.
*/
public class DfsFsck {
private final DfsRepository repo;
private final DfsObjDatabase objdb;
@@ -90,7 +92,7 @@ public class DfsFsck {
* @param pm
* callback to provide progress feedback during the check.
* @return all errors about the repository.
* @throws IOException
* @throws java.io.IOException
* if encounters IO errors during the process.
*/
public FsckError check(ProgressMonitor pm) throws IOException {
@ -185,10 +187,13 @@ public class DfsFsck {
}
/**
* Whether fsck should bypass object validity and integrity checks and only
* check connectivity.
*
* @param connectivityOnly
* whether fsck should bypass object validity and integrity
* checks and only check connectivity. The default is
* {@code false}, meaning to run all checks.
* whether fsck should bypass object validity and integrity
* checks and only check connectivity. The default is
* {@code false}, meaning to run all checks.
*/
public void setConnectivityOnly(boolean connectivityOnly) {
this.connectivityOnly = connectivityOnly;

80
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java

@@ -93,7 +93,9 @@ import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.io.CountingOutputStream;
/** Repack and garbage collect a repository. */
/**
* Repack and garbage collect a repository.
*/
public class DfsGarbageCollector {
private final DfsRepository repo;
private final RefDatabase refdb;
@@ -147,12 +149,18 @@ public class DfsGarbageCollector {
packConfig.setIndexVersion(2);
}
/** @return configuration used to generate the new pack file. */
/**
* Get configuration used to generate the new pack file.
*
* @return configuration used to generate the new pack file.
*/
public PackConfig getPackConfig() {
return packConfig;
}
/**
* Set the new configuration to use when creating the pack file.
*
* @param newConfig
* the new configuration to use when creating the pack file.
* @return {@code this}
@ -163,6 +171,8 @@ public class DfsGarbageCollector {
}
/**
* Set configuration to write a reftable.
*
* @param cfg
* configuration to write a reftable. Reftable writing is
* disabled (default) when {@code cfg} is {@code null}.
@ -174,11 +184,14 @@ public class DfsGarbageCollector {
}
/**
* Whether the garbage collector should convert references to reftable.
*
* @param convert
* if true, {@link #setReftableConfig(ReftableConfig)} has been
* set non-null, and a GC reftable doesn't yet exist, the garbage
* collector will make one by scanning the existing references,
* and writing a new reftable. Default is {@code true}.
* if {@code true}, {@link #setReftableConfig(ReftableConfig)}
* has been set non-null, and a GC reftable doesn't yet exist,
* the garbage collector will make one by scanning the existing
* references, and writing a new reftable. Default is
* {@code true}.
* @return {@code this}
*/
public DfsGarbageCollector setConvertToReftable(boolean convert) {
@ -187,9 +200,13 @@ public class DfsGarbageCollector {
}
/**
* Whether the garbage collector will include tombstones for deleted
* references in the reftable.
*
* @param include
* if true, the garbage collector will include tombstones for
* deleted references in the reftable. Default is {@code false}.
* if {@code true}, the garbage collector will include tombstones
* for deleted references in the reftable. Default is
* {@code false}.
* @return {@code this}
*/
public DfsGarbageCollector setIncludeDeletes(boolean include) {
@ -202,9 +219,10 @@ public class DfsGarbageCollector {
*
* @param u
* minUpdateIndex for the initial reftable created by scanning
* {@link DfsRefDatabase#getRefs(String)}. Ignored unless caller
* has also set {@link #setReftableConfig(ReftableConfig)}.
* Defaults to {@code 1}. Must be {@code u >= 0}.
* {@link org.eclipse.jgit.internal.storage.dfs.DfsRefDatabase#getRefs(String)}.
* Ignored unless caller has also set
* {@link #setReftableConfig(ReftableConfig)}. Defaults to
* {@code 1}. Must be {@code u >= 0}.
* @return {@code this}
*/
public DfsGarbageCollector setReftableInitialMinUpdateIndex(long u) {
@ -217,9 +235,10 @@ public class DfsGarbageCollector {
*
* @param u
* maxUpdateIndex for the initial reftable created by scanning
* {@link DfsRefDatabase#getRefs(String)}. Ignored unless caller
* has also set {@link #setReftableConfig(ReftableConfig)}.
* Defaults to {@code 1}. Must be {@code u >= 0}.
* {@link org.eclipse.jgit.internal.storage.dfs.DfsRefDatabase#getRefs(String)}.
* Ignored unless caller has also set
* {@link #setReftableConfig(ReftableConfig)}. Defaults to
* {@code 1}. Must be {@code u >= 0}.
* @return {@code this}
*/
public DfsGarbageCollector setReftableInitialMaxUpdateIndex(long u) {
@ -227,7 +246,12 @@ public class DfsGarbageCollector {
return this;
}
/** @return garbage packs smaller than this size will be repacked. */
/**
* Get coalesce garbage limit
*
* @return coalesce garbage limit, packs smaller than this size will be
* repacked.
*/
public long getCoalesceGarbageLimit() {
return coalesceGarbageLimit;
}
@ -244,7 +268,8 @@ public class DfsGarbageCollector {
* reading and copying the objects.
* <p>
* If limit is set to 0 the UNREACHABLE_GARBAGE coalesce is disabled.<br>
* If limit is set to {@link Long#MAX_VALUE}, everything is coalesced.
* If limit is set to {@link java.lang.Long#MAX_VALUE}, everything is
* coalesced.
* <p>
* Keeping unreachable garbage prevents race conditions with repository
* changes that may suddenly need an object whose only copy was stored in
@ -260,6 +285,8 @@ public class DfsGarbageCollector {
}
/**
* Get time to live for garbage packs.
*
* @return garbage packs older than this limit (in milliseconds) will be
* pruned as part of the garbage collection process if the value is
* > 0, otherwise garbage packs are retained.
@ -300,7 +327,7 @@ public class DfsGarbageCollector {
* @return true if the repack was successful without race conditions. False
* if a race condition was detected and the repack should be run
* again later.
* @throws IOException
* @throws java.io.IOException
* a new pack cannot be created.
*/
public boolean pack(ProgressMonitor pm) throws IOException {
@ -488,21 +515,30 @@ public class DfsGarbageCollector {
return cal.getTimeInMillis();
}
/** @return all of the source packs that fed into this compaction. */
/**
* Get all of the source packs that fed into this compaction.
*
* @return all of the source packs that fed into this compaction.
*/
public Set<DfsPackDescription> getSourcePacks() {
return toPrune();
}
/** @return new packs created by this compaction. */
/**
* Get new packs created by this compaction.
*
* @return new packs created by this compaction.
*/
public List<DfsPackDescription> getNewPacks() {
return newPackDesc;
}
/**
* @return statistics corresponding to the {@link #getNewPacks()}.
* Get statistics corresponding to the {@link #getNewPacks()}.
* <p>
* The elements can be null if the stat is not available for the pack file.
*
* <p>The elements can be null if the stat is not available for the pack
* file.
* @return statistics corresponding to the {@link #getNewPacks()}.
*/
public List<PackStatistics> getNewPackStatistics() {
return newPackStats;

17
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java

@@ -92,7 +92,9 @@ import org.eclipse.jgit.util.io.CountingOutputStream;
import org.eclipse.jgit.util.io.CountingOutputStream;
import org.eclipse.jgit.util.sha1.SHA1;
/** Inserts objects into the DFS. */
/**
* Inserts objects into the DFS.
*/
public class DfsInserter extends ObjectInserter {
/** Always produce version 2 indexes, to get CRC data. */
private static final int INDEX_VERSION = 2;
@@ -121,9 +123,12 @@ public class DfsInserter extends ObjectInserter {
}
/**
* Check existence
*
* @param check
* if false, will write out possibly-duplicate objects without
* first checking whether they exist in the repo; default is true.
* if {@code false}, will write out possibly-duplicate objects
* without first checking whether they exist in the repo; default
* is true.
*/
public void checkExisting(boolean check) {
checkExisting = check;
@ -133,16 +138,19 @@ public class DfsInserter extends ObjectInserter {
this.compression = compression;
}
/** {@inheritDoc} */
@Override
public DfsPackParser newPackParser(InputStream in) throws IOException {
return new DfsPackParser(db, this, in);
}
/** {@inheritDoc} */
@Override
public ObjectReader newReader() {
return new Reader();
}
/** {@inheritDoc} */
@Override
public ObjectId insert(int type, byte[] data, int off, int len)
throws IOException {
@ -159,6 +167,7 @@ public class DfsInserter extends ObjectInserter {
return endObject(id, offset);
}
/** {@inheritDoc} */
@Override
public ObjectId insert(int type, long len, InputStream in)
throws IOException {
@ -201,6 +210,7 @@ public class DfsInserter extends ObjectInserter {
return buf;
}
/** {@inheritDoc} */
@Override
public void flush() throws IOException {
if (packDsc == null)
@ -228,6 +238,7 @@ public class DfsInserter extends ObjectInserter {
clear();
}
/** {@inheritDoc} */
@Override
public void close() {
if (packOut != null) {

60
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java

@@ -61,7 +61,11 @@ import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectReader;
/** Manages objects stored in {@link DfsPackFile} on a storage system. */
/**
* Manages objects stored in
* {@link org.eclipse.jgit.internal.storage.dfs.DfsPackFile} on a storage
* system.
*/
public abstract class DfsObjDatabase extends ObjectDatabase {
private static final PackList NO_PACKS = new PackList(
new DfsPackFile[0],
@@ -157,7 +161,6 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
*
* @param repository
* repository owning this object database.
*
* @param options
* how readers should access the object database.
*/
@ -168,16 +171,22 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
this.readerOptions = options;
}
/** @return configured reader options, such as read-ahead. */
/**
* Get configured reader options, such as read-ahead.
*
* @return configured reader options, such as read-ahead.
*/
public DfsReaderOptions getReaderOptions() {
return readerOptions;
}
/** {@inheritDoc} */
@Override
public DfsReader newReader() {
return new DfsReader(this);
}
/** {@inheritDoc} */
@Override
public ObjectInserter newInserter() {
return new DfsInserter(this);
@ -188,7 +197,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
*
* @return list of available packs. The returned array is shared with the
* implementation and must not be modified by the caller.
* @throws IOException
* @throws java.io.IOException
* the pack list cannot be initialized.
*/
public DfsPackFile[] getPacks() throws IOException {
@ -200,7 +209,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
*
* @return list of available reftables. The returned array is shared with
* the implementation and must not be modified by the caller.
* @throws IOException
* @throws java.io.IOException
* the pack list cannot be initialized.
*/
public DfsReftable[] getReftables() throws IOException {
@ -213,14 +222,18 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* @return list of available packs, with some additional metadata. The
* returned array is shared with the implementation and must not be
* modified by the caller.
* @throws IOException
* @throws java.io.IOException
* the pack list cannot be initialized.
*/
public PackList getPackList() throws IOException {
return scanPacks(NO_PACKS);
}
/** @return repository owning this object database. */
/**
* Get repository owning this object database.
*
* @return repository owning this object database.
*/
protected DfsRepository getRepository() {
return repository;
}
@ -267,7 +280,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* @param avoidUnreachableObjects
* if true, ignore objects that are unreachable.
* @return true if the specified object is stored in this database.
* @throws IOException
* @throws java.io.IOException
* the object store cannot be accessed.
*/
public boolean has(AnyObjectId objectId, boolean avoidUnreachableObjects)
@ -285,7 +298,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* where the pack stream is created.
* @return a unique name for the pack file. Must not collide with any other
* pack file name in the same DFS.
* @throws IOException
* @throws java.io.IOException
* a new unique pack description cannot be generated.
*/
protected abstract DfsPackDescription newPack(PackSource source)
@ -299,7 +312,8 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* {@code newPack(source).setEstimatedPackSize(estimatedPackSize)}. But the
* clients can override this method to use the given
* {@code estomatedPackSize} value more efficiently in the process of
* creating a new {@link DfsPackDescription} object.
* creating a new
* {@link org.eclipse.jgit.internal.storage.dfs.DfsPackDescription} object.
*
* @param source
* where the pack stream is created.
@ -307,7 +321,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* the estimated size of the pack.
* @return a unique name for the pack file. Must not collide with any other
* pack file name in the same DFS.
* @throws IOException
* @throws java.io.IOException
* a new unique pack description cannot be generated.
*/
protected DfsPackDescription newPack(PackSource source,
@ -338,7 +352,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* description of the new packs.
* @param replaces
* if not null, list of packs to remove.
* @throws IOException
* @throws java.io.IOException
* the packs cannot be committed. On failure a rollback must
* also be attempted by the caller.
*/
@ -352,12 +366,11 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* Implementation of pack commit.
*
* @see #commitPack(Collection, Collection)
*
* @param desc
* description of the new packs.
* @param replaces
* if not null, list of packs to remove.
* @throws IOException
* @throws java.io.IOException
* the packs cannot be committed.
*/
protected abstract void commitPackImpl(Collection<DfsPackDescription> desc,
@ -388,7 +401,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* DfsPackDescription objects.
*
* @return available packs. May be empty if there are no packs.
* @throws IOException
* @throws java.io.IOException
* the packs cannot be listed and the object database is not
* functional to the caller.
*/
@ -405,9 +418,9 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* @param ext
* file extension that will be read i.e "pack" or "idx".
* @return channel to read the file.
* @throws FileNotFoundException
* @throws java.io.FileNotFoundException
* the file does not exist.
* @throws IOException
* @throws java.io.IOException
* the file cannot be opened.
*/
protected abstract ReadableChannel openFile(
@ -424,7 +437,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
* @param ext
* file extension that will be written i.e "pack" or "idx".
* @return channel to write the file.
* @throws IOException
* @throws java.io.IOException
* the file cannot be opened.
*/
protected abstract DfsOutputStream writeFile(
@ -565,7 +578,11 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
return forReuse;
}
/** @return comparator to sort {@link DfsReftable} by priority. */
/**
* Get comparator to sort {@link DfsReftable} by priority.
*
* @return comparator to sort {@link DfsReftable} by priority.
*/
protected Comparator<DfsReftable> reftableComparator() {
return (fa, fb) -> {
DfsPackDescription a = fa.getPackDescription();
@ -593,11 +610,14 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
return s != null ? s.category : 0;
}
/** Clears the cached list of packs, forcing them to be scanned again. */
/**
* Clears the cached list of packs, forcing them to be scanned again.
*/
protected void clearCache() {
packList.set(NO_PACKS);
}
/** {@inheritDoc} */
@Override
public void close() {
packList.set(NO_PACKS);

4
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjectRepresentation.java

@@ -57,21 +57,25 @@ class DfsObjectRepresentation extends StoredObjectRepresentation {
this.pack = pack;
}
/** {@inheritDoc} */
@Override
public int getFormat() {
return format;
}
/** {@inheritDoc} */
@Override
public int getWeight() {
return (int) Math.min(length, Integer.MAX_VALUE);
}
/** {@inheritDoc} */
@Override
public ObjectId getDeltaBase() {
return baseId;
}
/** {@inheritDoc} */
@Override
public boolean wasDeltaAttempted() {
switch (pack.getPackDescription().getPackSource()) {

2
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjectToPack.java

@@ -72,12 +72,14 @@ class DfsObjectToPack extends ObjectToPack {
setExtendedFlag(FLAG_FOUND);
}
/** {@inheritDoc} */
@Override
protected void clearReuseAsIs() {
super.clearReuseAsIs();
pack = null;
}
/** {@inheritDoc} */
@Override
public void select(StoredObjectRepresentation ref) {
DfsObjectRepresentation ptr = (DfsObjectRepresentation) ref;

4
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsOutputStream.java

@@ -72,11 +72,13 @@ public abstract class DfsOutputStream extends OutputStream {
return 0;
}
/** {@inheritDoc} */
@Override
public void write(int b) throws IOException {
write(new byte[] { (byte) b });
}
/** {@inheritDoc} */
@Override
public abstract void write(byte[] buf, int off, int len) throws IOException;
@ -91,7 +93,7 @@ public abstract class DfsOutputStream extends OutputStream {
* buffer to populate. Up to {@code buf.remaining()} bytes will
* be read from {@code position}.
* @return number of bytes actually read.
* @throws IOException
* @throws java.io.IOException
* reading is not supported, or the read cannot be performed due
* to DFS errors.
*/

24
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java

@@ -126,6 +126,8 @@ public class DfsPackCompactor {
}
/**
* Set configuration to write a reftable.
*
* @param cfg
* configuration to write a reftable. Reftable compacting is
* disabled (default) when {@code cfg} is {@code null}.
@ -172,7 +174,7 @@ public class DfsPackCompactor {
* ones are omitted.
*
* @return {@code this}
* @throws IOException
* @throws java.io.IOException
* existing packs cannot be read.
*/
public DfsPackCompactor autoAdd() throws IOException {
@ -215,7 +217,7 @@ public class DfsPackCompactor {
* @param pack
* objects to not include.
* @return {@code this}.
* @throws IOException
* @throws java.io.IOException
* pack index cannot be loaded.
*/
public DfsPackCompactor exclude(DfsPackFile pack) throws IOException {
@ -232,7 +234,7 @@ public class DfsPackCompactor {
* @param pm
* progress monitor to receive updates on as packing may take a
* while, depending on the size of the repository.
* @throws IOException
* @throws java.io.IOException
* the packs cannot be compacted.
*/
public void compact(ProgressMonitor pm) throws IOException {
@ -331,7 +333,11 @@ public class DfsPackCompactor {
}
}
/** @return all of the source packs that fed into this compaction. */
/**
* Get all of the source packs that fed into this compaction.
*
* @return all of the source packs that fed into this compaction.
*/
public Collection<DfsPackDescription> getSourcePacks() {
Set<DfsPackDescription> src = new HashSet<>();
for (DfsPackFile pack : srcPacks) {
@ -343,7 +349,11 @@ public class DfsPackCompactor {
return src;
}
/** @return new packs created by this compaction. */
/**
* Get new packs created by this compaction.
*
* @return new packs created by this compaction.
*/
public List<DfsPackDescription> getNewPacks() {
return outDesc != null
? Collections.singletonList(outDesc)
@ -351,9 +361,11 @@ public class DfsPackCompactor {
}
/**
* Get statistics corresponding to the {@link #getNewPacks()}.
* May be null if statistics are not available.
*
* @return statistics corresponding to the {@link #getNewPacks()}.
*
* <p>The element may be null if the stat is not available.
*/
public List<PackStatistics> getNewPackStatistics() {
return outDesc != null

102
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackDescription.java

@ -104,7 +104,11 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
blockSizeMap = new int[extCnt];
}
/** @return description of the repository. */
/**
* Get description of the repository.
*
* @return description of the repository.
*/
public DfsRepositoryDescription getRepositoryDescription() {
return repoDesc;
}
@ -120,15 +124,19 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Whether the pack file extension is known to exist.
*
* @param ext
* the file extension
* @return whether the pack file extensions is known to exist.
* @return whether the pack file extension is known to exist.
*/
public boolean hasFileExt(PackExt ext) {
return (extensions & ext.getBit()) != 0;
}
/**
* Get file name
*
* @param ext
* the file extension
* @return name of the file.
@ -138,6 +146,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Get cache key for use by the block cache.
*
* @param ext
* the file extension.
* @return cache key for use by the block cache.
@ -147,12 +157,18 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
ext);
}
/** @return the source of the pack. */
/**
* Get the source of the pack.
*
* @return the source of the pack.
*/
public PackSource getPackSource() {
return packSource;
}
/**
* Set the source of the pack.
*
* @param source
* the source of the pack.
* @return {@code this}
@ -162,12 +178,18 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
/** @return time the pack was created, in milliseconds. */
/**
* Get time the pack was created, in milliseconds.
*
* @return time the pack was created, in milliseconds.
*/
public long getLastModified() {
return lastModified;
}
/**
* Set time the pack was created, in milliseconds.
*
* @param timeMillis
* time the pack was created, in milliseconds. 0 if not known.
* @return {@code this}
@ -177,14 +199,20 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
/** @return minUpdateIndex for the reftable, if present. */
/**
* Get minUpdateIndex for the reftable, if present.
*
* @return minUpdateIndex for the reftable, if present.
*/
public long getMinUpdateIndex() {
return minUpdateIndex;
}
/**
* Set minUpdateIndex for the reftable.
*
* @param min
* minUpdateIndex for the reftable, or 0.
* minUpdateIndex for the reftable.
* @return {@code this}
*/
public DfsPackDescription setMinUpdateIndex(long min) {
@ -192,14 +220,20 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
/** @return maxUpdateIndex for the reftable, if present. */
/**
* Get maxUpdateIndex for the reftable, if present.
*
* @return maxUpdateIndex for the reftable, if present.
*/
public long getMaxUpdateIndex() {
return maxUpdateIndex;
}
/**
* Set maxUpdateIndex for the reftable.
*
* @param max
* maxUpdateIndex for the reftable, or 0.
* maxUpdateIndex for the reftable.
* @return {@code this}
*/
public DfsPackDescription setMaxUpdateIndex(long max) {
@ -208,6 +242,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Set size of the file in bytes.
*
* @param ext
* the file extension.
* @param bytes
@ -225,6 +261,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Get size of the file, in bytes.
*
* @param ext
* the file extension.
* @return size of the file, in bytes. If 0 the file size is not yet known.
@ -235,6 +273,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Get blockSize of the file, in bytes.
*
* @param ext
* the file extension.
* @return blockSize of the file, in bytes. If 0 the blockSize size is not
@ -246,6 +286,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Set blockSize of the file, in bytes.
*
* @param ext
* the file extension.
* @param blockSize
@ -263,6 +305,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Set estimated size of the .pack file in bytes.
*
* @param estimatedPackSize
* estimated size of the .pack file in bytes. If 0 the pack file
* size is unknown.
@ -274,6 +318,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Get estimated size of the .pack file in bytes.
*
* @return estimated size of the .pack file in bytes. If 0 the pack file
* size is unknown.
*/
@ -281,12 +327,18 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return estimatedPackSize;
}
/** @return number of objects in the pack. */
/**
* Get number of objects in the pack.
*
* @return number of objects in the pack.
*/
public long getObjectCount() {
return objectCount;
}
/**
* Set number of objects in the pack.
*
* @param cnt
* number of objects in the pack.
* @return {@code this}
@ -296,12 +348,18 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
/** @return number of delta compressed objects in the pack. */
/**
* Get number of delta compressed objects in the pack.
*
* @return number of delta compressed objects in the pack.
*/
public long getDeltaCount() {
return deltaCount;
}
/**
* Set number of delta compressed objects in the pack.
*
* @param cnt
* number of delta compressed objects in the pack.
* @return {@code this}
@ -312,6 +370,8 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* Get statistics from PackWriter, if the pack was built with it.
*
* @return statistics from PackWriter, if the pack was built with it.
* Generally this is only available for packs created by
* DfsGarbageCollector or DfsPackCompactor, and only when the pack
@ -329,7 +389,11 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
/** @return stats from the sibling reftable, if created. */
/**
* Get stats from the sibling reftable, if created.
*
* @return stats from the sibling reftable, if created.
*/
public ReftableWriter.Stats getReftableStats() {
return refStats;
}
@ -353,12 +417,18 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
/** @return the version of the index file written. */
/**
* Get the version of the index file written.
*
* @return the version of the index file written.
*/
public int getIndexVersion() {
return indexVersion;
}
/**
* Set the version of the index file written.
*
* @param version
* the version of the index file written.
* @return {@code this}
@ -368,11 +438,13 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return packName.hashCode();
}
/** {@inheritDoc} */
@Override
public boolean equals(Object b) {
if (b instanceof DfsPackDescription) {
@ -384,15 +456,14 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
/**
* {@inheritDoc}
* <p>
* Sort packs according to the optimal lookup ordering.
* <p>
* This method tries to position packs in the order readers should examine
* them when looking for objects by SHA-1. The default tries to sort packs
* with more recent modification dates before older packs, and packs with
* fewer objects before packs with more objects.
*
* @param b
* the other pack.
*/
@Override
public int compareTo(DfsPackDescription b) {
@ -438,6 +509,7 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
}
}
/** {@inheritDoc} */
@Override
public String toString() {
return getFileName(PackExt.PACK);

12
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java

@ -133,12 +133,18 @@ public final class DfsPackFile extends BlockBasedFile {
length = sz > 0 ? sz : -1;
}
/** @return description that was originally used to configure this pack file. */
/**
* Get description that was originally used to configure this pack file.
*
* @return description that was originally used to configure this pack file.
*/
public DfsPackDescription getPackDescription() {
return desc;
}
/**
* Whether the pack index file is loaded and cached in memory.
*
* @return whether the pack index file is loaded and cached in memory.
*/
public boolean isIndexLoaded() {
@ -160,7 +166,7 @@ public final class DfsPackFile extends BlockBasedFile {
* reader context to support reading from the backing store if
* the index is not already loaded in memory.
* @return the PackIndex.
* @throws IOException
* @throws java.io.IOException
* the pack index is not available, or is corrupt.
*/
public PackIndex getPackIndex(DfsReader ctx) throws IOException {
@ -350,7 +356,7 @@ public final class DfsPackFile extends BlockBasedFile {
* @param id
* object to be located.
* @return true if the object exists in this pack; false if it does not.
* @throws IOException
* @throws java.io.IOException
* the pack index is not available, or is corrupt.
*/
public boolean hasObject(DfsReader ctx, AnyObjectId id) throws IOException {

28
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java

@ -63,7 +63,9 @@ import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.transport.PackParser;
import org.eclipse.jgit.transport.PackedObjectInfo;
/** Parses a pack stream into the DFS, by creating a new pack and index. */
/**
* Parses a pack stream into the DFS, by creating a new pack and index.
*/
public class DfsPackParser extends PackParser {
private final DfsObjDatabase objdb;
@ -132,6 +134,7 @@ public class DfsPackParser extends PackParser {
this.packDigest = Constants.newMessageDigest();
}
/** {@inheritDoc} */
@Override
public PackLock parse(ProgressMonitor receiving, ProgressMonitor resolving)
throws IOException {
@ -193,11 +196,16 @@ public class DfsPackParser extends PackParser {
}
}
/** @return description of the imported pack, if one was made. */
/**
* Get description of the imported pack, if one was made.
*
* @return description of the imported pack, if one was made.
*/
public DfsPackDescription getPackDescription() {
return packDsc;
}
/** {@inheritDoc} */
@Override
protected void onPackHeader(long objectCount) throws IOException {
if (objectCount == 0) {
@ -219,29 +227,34 @@ public class DfsPackParser extends PackParser {
currBuf = new byte[blockSize];
}
/** {@inheritDoc} */
@Override
protected void onBeginWholeObject(long streamPosition, int type,
long inflatedSize) throws IOException {
crc.reset();
}
/** {@inheritDoc} */
@Override
protected void onEndWholeObject(PackedObjectInfo info) throws IOException {
info.setCRC((int) crc.getValue());
}
/** {@inheritDoc} */
@Override
protected void onBeginOfsDelta(long streamPosition,
long baseStreamPosition, long inflatedSize) throws IOException {
crc.reset();
}
/** {@inheritDoc} */
@Override
protected void onBeginRefDelta(long streamPosition, AnyObjectId baseId,
long inflatedSize) throws IOException {
crc.reset();
}
/** {@inheritDoc} */
@Override
protected UnresolvedDelta onEndDelta() throws IOException {
UnresolvedDelta delta = new UnresolvedDelta();
@ -249,24 +262,28 @@ public class DfsPackParser extends PackParser {
return delta;
}
/** {@inheritDoc} */
@Override
protected void onInflatedObjectData(PackedObjectInfo obj, int typeCode,
byte[] data) throws IOException {
// DfsPackParser ignores this event.
}
/** {@inheritDoc} */
@Override
protected void onObjectHeader(Source src, byte[] raw, int pos, int len)
throws IOException {
crc.update(raw, pos, len);
}
/** {@inheritDoc} */
@Override
protected void onObjectData(Source src, byte[] raw, int pos, int len)
throws IOException {
crc.update(raw, pos, len);
}
/** {@inheritDoc} */
@Override
protected void onStoreStream(byte[] raw, int pos, int len)
throws IOException {
@ -313,6 +330,7 @@ public class DfsPackParser extends PackParser {
return v;
}
/** {@inheritDoc} */
@Override
protected void onPackFooter(byte[] hash) throws IOException {
// The base class will validate the original hash matches
@ -322,6 +340,7 @@ public class DfsPackParser extends PackParser {
packHash = hash;
}
/** {@inheritDoc} */
@Override
protected ObjectTypeAndSize seekDatabase(PackedObjectInfo obj,
ObjectTypeAndSize info) throws IOException {
@ -330,6 +349,7 @@ public class DfsPackParser extends PackParser {
return readObjectHeader(info);
}
/** {@inheritDoc} */
@Override
protected ObjectTypeAndSize seekDatabase(UnresolvedDelta delta,
ObjectTypeAndSize info) throws IOException {
@ -338,6 +358,7 @@ public class DfsPackParser extends PackParser {
return readObjectHeader(info);
}
/** {@inheritDoc} */
@Override
protected int readDatabase(byte[] dst, int pos, int cnt) throws IOException {
if (cnt == 0)
@ -393,11 +414,13 @@ public class DfsPackParser extends PackParser {
return (pos / blockSize) * blockSize;
}
/** {@inheritDoc} */
@Override
protected boolean checkCRC(int oldCRC) {
return oldCRC == (int) crc.getValue();
}
/** {@inheritDoc} */
@Override
protected boolean onAppendBase(final int typeCode, final byte[] data,
final PackedObjectInfo info) throws IOException {
@ -437,6 +460,7 @@ public class DfsPackParser extends PackParser {
return true;
}
/** {@inheritDoc} */
@Override
protected void onEndThinPack() throws IOException {
// Normally when a thin pack is closed the pack header gets

7
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPacksChangedEvent.java

@ -45,14 +45,19 @@ package org.eclipse.jgit.internal.storage.dfs;
import org.eclipse.jgit.events.RepositoryEvent;
/** Describes a change to the list of packs in a {@link DfsRepository}. */
/**
* Describes a change to the list of packs in a
* {@link org.eclipse.jgit.internal.storage.dfs.DfsRepository}.
*/
public class DfsPacksChangedEvent
extends RepositoryEvent<DfsPacksChangedListener> {
/** {@inheritDoc} */
@Override
public Class<DfsPacksChangedListener> getListenerType() {
return DfsPacksChangedListener.class;
}
/** {@inheritDoc} */
@Override
public void dispatch(DfsPacksChangedListener listener) {
listener.onPacksChanged(this);

4
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPacksChangedListener.java

@ -45,7 +45,9 @@ package org.eclipse.jgit.internal.storage.dfs;
import org.eclipse.jgit.events.RepositoryListener;
/** Receives {@link DfsPacksChangedEvent}s. */
/**
* Receives {@link org.eclipse.jgit.internal.storage.dfs.DfsPacksChangedEvent}s.
*/
public interface DfsPacksChangedListener extends RepositoryListener {
/**
* Invoked when all packs in a repository are listed.

31
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java

@ -92,8 +92,8 @@ import org.eclipse.jgit.util.BlockList;
/**
* Reader to access repository content through.
* <p>
* See the base {@link ObjectReader} documentation for details. Notably, a
* reader is not thread safe.
* See the base {@link org.eclipse.jgit.lib.ObjectReader} documentation for
* details. Notably, a reader is not thread safe.
*/
public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
private static final int MAX_RESOLVE_MATCHES = 256;
@ -133,16 +133,19 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return baseCache;
}
/** {@inheritDoc} */
@Override
public ObjectReader newReader() {
return db.newReader();
}
/** {@inheritDoc} */
@Override
public void setAvoidUnreachableObjects(boolean avoid) {
avoidUnreachable = avoid;
}
/** {@inheritDoc} */
@Override
public BitmapIndex getBitmapIndex() throws IOException {
for (DfsPackFile pack : db.getPacks()) {
@ -153,6 +156,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return null;
}
/** {@inheritDoc} */
@Override
public Collection<CachedPack> getCachedPacksAndUpdate(
BitmapBuilder needBitmap) throws IOException {
@ -165,6 +169,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return Collections.emptyList();
}
/** {@inheritDoc} */
@Override
public Collection<ObjectId> resolve(AbbreviatedObjectId id)
throws IOException {
@ -193,6 +198,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
}
}
/** {@inheritDoc} */
@Override
public boolean has(AnyObjectId objectId) throws IOException {
if (last != null
@ -222,6 +228,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return false;
}
/** {@inheritDoc} */
@Override
public ObjectLoader open(AnyObjectId objectId, int typeHint)
throws MissingObjectException, IncorrectObjectTypeException,
@ -276,6 +283,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return null;
}
/** {@inheritDoc} */
@Override
public Set<ObjectId> getShallowCommits() {
return Collections.emptySet();
@ -385,6 +393,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return avoidUnreachable && pack.isGarbage();
}
/** {@inheritDoc} */
@Override
public <T extends ObjectId> AsyncObjectLoaderQueue<T> open(
Iterable<T> objectIds, final boolean reportMissing) {
@ -444,6 +453,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
};
}
/** {@inheritDoc} */
@Override
public <T extends ObjectId> AsyncObjectSizeQueue<T> getObjectSize(
Iterable<T> objectIds, final boolean reportMissing) {
@ -505,6 +515,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
};
}
/** {@inheritDoc} */
@Override
public long getObjectSize(AnyObjectId objectId, int typeHint)
throws MissingObjectException, IncorrectObjectTypeException,
@ -550,6 +561,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return -1;
}
/** {@inheritDoc} */
@Override
public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) {
return new DfsObjectToPack(objectId, type);
@ -631,6 +643,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return tmp;
}
/** {@inheritDoc} */
@Override
public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp,
boolean validate) throws IOException,
@ -639,6 +652,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
src.pack.copyAsIs(out, src, validate, this);
}
/** {@inheritDoc} */
@Override
public void writeObjects(PackOutputStream out, List<ObjectToPack> list)
throws IOException {
@ -646,6 +660,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
out.writeObject(otp);
}
/** {@inheritDoc} */
@Override
public void copyPackAsIs(PackOutputStream out, CachedPack pack)
throws IOException {
@ -771,12 +786,20 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
block = null;
}
/** @return IO statistics accumulated by this reader. */
/**
* Get IO statistics accumulated by this reader.
*
* @return IO statistics accumulated by this reader.
*/
public DfsReaderIoStats getIoStats() {
return new DfsReaderIoStats(stats);
}
/** Release the current window cursor. */
/**
* {@inheritDoc}
* <p>
* Release the current window cursor.
*/
@Override
public void close() {
last = null;

64
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java

@ -43,7 +43,9 @@
package org.eclipse.jgit.internal.storage.dfs;
/** IO statistics for a {@link DfsReader}. */
/**
* IO statistics for a {@link org.eclipse.jgit.internal.storage.dfs.DfsReader}.
*/
public class DfsReaderIoStats {
/** POJO to accumulate IO statistics. */
public static class Accumulator {
@ -87,52 +89,92 @@ public class DfsReaderIoStats {
this.stats = stats;
}
/** @return number of times the reader explicitly called scanPacks. */
/**
* Get number of times the reader explicitly called scanPacks.
*
* @return number of times the reader explicitly called scanPacks.
*/
public long getScanPacks() {
return stats.scanPacks;
}
/** @return total number of complete pack indexes read into memory. */
/**
* Get total number of complete pack indexes read into memory.
*
* @return total number of complete pack indexes read into memory.
*/
public long getReadPackIndexCount() {
return stats.readIdx;
}
/** @return total number of complete bitmap indexes read into memory. */
/**
* Get total number of complete bitmap indexes read into memory.
*
* @return total number of complete bitmap indexes read into memory.
*/
public long getReadBitmapIndexCount() {
return stats.readBitmap;
}
/** @return total number of bytes read from indexes. */
/**
* Get total number of bytes read from indexes.
*
* @return total number of bytes read from indexes.
*/
public long getReadIndexBytes() {
return stats.readIdxBytes;
}
/** @return total microseconds spent reading pack or bitmap indexes. */
/**
* Get total microseconds spent reading pack or bitmap indexes.
*
* @return total microseconds spent reading pack or bitmap indexes.
*/
public long getReadIndexMicros() {
return stats.readIdxMicros;
}
/** @return total number of block cache hits. */
/**
* Get total number of block cache hits.
*
* @return total number of block cache hits.
*/
public long getBlockCacheHits() {
return stats.blockCacheHit;
}
/** @return total number of discrete blocks read from pack file(s). */
/**
* Get total number of discrete blocks read from pack file(s).
*
* @return total number of discrete blocks read from pack file(s).
*/
public long getReadBlocksCount() {
return stats.readBlock;
}
/** @return total number of compressed bytes read as block sized units. */
/**
* Get total number of compressed bytes read as block sized units.
*
* @return total number of compressed bytes read as block sized units.
*/
public long getReadBlocksBytes() {
return stats.readBlockBytes;
}
/** @return total microseconds spent reading blocks. */
/**
* Get total microseconds spent reading blocks.
*
* @return total microseconds spent reading blocks.
*/
public long getReadBlocksMicros() {
return stats.readBlockMicros;
}
/** @return total number of bytes decompressed. */
/**
* Get total number of bytes decompressed.
*
* @return total number of bytes decompressed.
*/
public long getInflatedBytes() {
return stats.inflatedBytes;
}

28
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java

@ -52,7 +52,9 @@ import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_FILE_TRESHO
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.storage.pack.PackConfig;
/** Options controlling how objects are read from a DFS stored repository. */
/**
* Options controlling how objects are read from a DFS stored repository.
*/
public class DfsReaderOptions {
/** 1024 (number of bytes in one kibibyte/kilobyte) */
public static final int KiB = 1024;
@ -65,13 +67,19 @@ public class DfsReaderOptions {
private int streamPackBufferSize;
/** Create a default reader configuration. */
/**
* Create a default reader configuration.
*/
public DfsReaderOptions() {
setDeltaBaseCacheLimit(10 * MiB);
setStreamFileThreshold(PackConfig.DEFAULT_BIG_FILE_THRESHOLD);
}
/** @return maximum number of bytes to hold in per-reader DeltaBaseCache. */
/**
* Get maximum number of bytes to hold in per-reader DeltaBaseCache.
*
* @return maximum number of bytes to hold in per-reader DeltaBaseCache.
*/
public int getDeltaBaseCacheLimit() {
return deltaBaseCacheLimit;
}
@ -88,12 +96,18 @@ public class DfsReaderOptions {
return this;
}
/** @return the size threshold beyond which objects must be streamed. */
/**
* Get the size threshold beyond which objects must be streamed.
*
* @return the size threshold beyond which objects must be streamed.
*/
public int getStreamFileThreshold() {
return streamFileThreshold;
}
/**
* Set new byte limit for objects that must be streamed.
*
* @param newLimit
* new byte limit for objects that must be streamed. Objects
* smaller than this size can be obtained as a contiguous byte
@ -107,6 +121,9 @@ public class DfsReaderOptions {
}
/**
* Get number of bytes to use for buffering when streaming a pack file
* during copying.
*
* @return number of bytes to use for buffering when streaming a pack file
* during copying. If 0 the block size of the pack is used.
*/
@ -115,6 +132,9 @@ public class DfsReaderOptions {
}
/**
* Set new buffer size in bytes for buffers used when streaming pack files
* during copying.
*
* @param bufsz
* new buffer size in bytes for buffers used when streaming pack
* files during copying.

28
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRefDatabase.java

@ -64,7 +64,10 @@ import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.util.RefList;
import org.eclipse.jgit.util.RefMap;
/** */
/**
* Abstract DfsRefDatabase class.
*
*/
public abstract class DfsRefDatabase extends RefDatabase {
private final DfsRepository repository;
@ -81,7 +84,11 @@ public abstract class DfsRefDatabase extends RefDatabase {
this.cache = new AtomicReference<>();
}
/** @return the repository the database holds the references of. */
/**
* Get the repository the database holds the references of.
*
* @return the repository the database holds the references of.
*/
protected DfsRepository getRepository() {
return repository;
}
@ -90,6 +97,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
return 0 < read().size();
}
/** {@inheritDoc} */
@Override
public Ref exactRef(String name) throws IOException {
RefCache curr = read();
@ -97,6 +105,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
return ref != null ? resolve(ref, 0, curr.ids) : null;
}
/** {@inheritDoc} */
@Override
public Ref getRef(String needle) throws IOException {
RefCache curr = read();
@ -110,11 +119,13 @@ public abstract class DfsRefDatabase extends RefDatabase {
return null;
}
/** {@inheritDoc} */
@Override
public List<Ref> getAdditionalRefs() {
return Collections.emptyList();
}
/** {@inheritDoc} */
@Override
public Map<String, Ref> getRefs(String prefix) throws IOException {
RefCache curr = read();
@ -161,6 +172,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
return new SymbolicRef(ref.getName(), dst);
}
/** {@inheritDoc} */
@Override
public Ref peel(Ref ref) throws IOException {
final Ref oldLeaf = ref.getLeaf();
@ -207,6 +219,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
return leaf;
}
/** {@inheritDoc} */
@Override
public RefUpdate newUpdate(String refName, boolean detach)
throws IOException {
@ -223,6 +236,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
return update;
}
/** {@inheritDoc} */
@Override
public RefRename newRename(String fromName, String toName)
throws IOException {
@ -231,6 +245,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
return new DfsRefRename(src, dst);
}
/** {@inheritDoc} */
@Override
public boolean isNameConflicting(String refName) throws IOException {
RefList<Ref> all = read().ids;
@ -252,16 +267,19 @@ public abstract class DfsRefDatabase extends RefDatabase {
return false;
}
/** {@inheritDoc} */
@Override
public void create() {
// Nothing to do.
}
/** {@inheritDoc} */
@Override
public void refresh() {
clearCache();
}
/** {@inheritDoc} */
@Override
public void close() {
clearCache();
@ -304,7 +322,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
* Read all known references in the repository.
*
* @return all current references of the repository.
* @throws IOException
* @throws java.io.IOException
* references cannot be accessed.
*/
protected abstract RefCache scanAllRefs() throws IOException;
@ -329,7 +347,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
* @param newRef
* new reference to store.
* @return true if the put was successful; false otherwise.
* @throws IOException
* @throws java.io.IOException
* the reference cannot be put due to a system error.
*/
protected abstract boolean compareAndPut(Ref oldRef, Ref newRef)
@ -341,7 +359,7 @@ public abstract class DfsRefDatabase extends RefDatabase {
* @param oldRef
* the old reference information that was previously read.
* @return true if the remove was successful; false otherwise.
* @throws IOException
* @throws java.io.IOException
* the reference could not be removed due to a system error.
*/
protected abstract boolean compareAndRemove(Ref oldRef) throws IOException;

1
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRefRename.java

@ -55,6 +55,7 @@ final class DfsRefRename extends RefRename {
super(src, dst);
}
/** {@inheritDoc} */
@Override
protected Result doRename() throws IOException {
// TODO Correctly handle renaming foo/bar to foo.

8
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRefUpdate.java

@ -66,16 +66,19 @@ final class DfsRefUpdate extends RefUpdate {
this.refdb = refdb;
}
/** {@inheritDoc} */
@Override
protected DfsRefDatabase getRefDatabase() {
return refdb;
}
/** {@inheritDoc} */
@Override
protected DfsRepository getRepository() {
return refdb.getRepository();
}
/** {@inheritDoc} */
@Override
protected boolean tryLock(boolean deref) throws IOException {
dstRef = getRef();
@ -90,11 +93,13 @@ final class DfsRefUpdate extends RefUpdate {
return true;
}
/** {@inheritDoc} */
@Override
protected void unlock() {
// No state is held while "locked".
}
/** {@inheritDoc} */
@Override
public Result update(RevWalk walk) throws IOException {
try {
@ -105,6 +110,7 @@ final class DfsRefUpdate extends RefUpdate {
}
}
/** {@inheritDoc} */
@Override
protected Result doUpdate(Result desiredResult) throws IOException {
ObjectIdRef newRef;
@ -129,6 +135,7 @@ final class DfsRefUpdate extends RefUpdate {
return Result.LOCK_FAILURE;
}
/** {@inheritDoc} */
@Override
protected Result doDelete(Result desiredResult) throws IOException {
if (getRefDatabase().compareAndRemove(dstRef)) {
@ -138,6 +145,7 @@ final class DfsRefUpdate extends RefUpdate {
return Result.LOCK_FAILURE;
}
/** {@inheritDoc} */
@Override
protected Result doLink(String target) throws IOException {
final SymbolicRef newRef = new SymbolicRef(

12
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftable.java

@ -51,7 +51,9 @@ import java.nio.ByteBuffer;
import org.eclipse.jgit.internal.storage.io.BlockSource;
import org.eclipse.jgit.internal.storage.reftable.ReftableReader;
/** A reftable stored in {@link DfsBlockCache}. */
/**
* A reftable stored in {@link org.eclipse.jgit.internal.storage.dfs.DfsBlockCache}.
*/
public class DfsReftable extends BlockBasedFile {
/**
* Construct a reader for an existing reftable.
@ -83,7 +85,11 @@ public class DfsReftable extends BlockBasedFile {
length = sz > 0 ? sz : -1;
}
/** @return description that was originally used to configure this file. */
/**
* Get description that was originally used to configure this file.
*
* @return description that was originally used to configure this file.
*/
public DfsPackDescription getPackDescription() {
return desc;
}
@ -96,7 +102,7 @@ public class DfsReftable extends BlockBasedFile {
* @param ctx
* reader to access the DFS storage.
* @return cursor to read the table; caller must close.
* @throws IOException
* @throws java.io.IOException
* table cannot be opened.
*/
public ReftableReader open(DfsReader ctx) throws IOException {

39
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java

@ -49,7 +49,6 @@ import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.internal.storage.reftable.MergedReftable;
import org.eclipse.jgit.internal.storage.reftable.RefCursor;
import org.eclipse.jgit.internal.storage.reftable.Reftable;
@ -64,12 +63,15 @@ import org.eclipse.jgit.util.RefList;
import org.eclipse.jgit.util.RefMap;
/**
* A {@link DfsRefDatabase} that uses reftable for storage.
* A {@link org.eclipse.jgit.internal.storage.dfs.DfsRefDatabase} that uses
* reftable for storage.
* <p>
* A {@code DfsRefDatabase} instance is thread-safe.
* <p>
* Implementors may wish to use {@link DfsPackDescription#getMaxUpdateIndex()}
* as the primary key identifier for a {@link PackExt#REFTABLE} only pack
* Implementors may wish to use
* {@link org.eclipse.jgit.internal.storage.dfs.DfsPackDescription#getMaxUpdateIndex()}
* as the primary key identifier for a
* {@link org.eclipse.jgit.internal.storage.pack.PackExt#REFTABLE} only pack
* description, ensuring that when there are competing transactions one wins,
* and one will fail.
*/
@ -92,28 +94,40 @@ public class DfsReftableDatabase extends DfsRefDatabase {
super(repo);
}
/** {@inheritDoc} */
@Override
public boolean performsAtomicTransactions() {
return true;
}
/** {@inheritDoc} */
@Override
public BatchRefUpdate newBatchUpdate() {
DfsObjDatabase odb = getRepository().getObjectDatabase();
return new ReftableBatchRefUpdate(this, odb);
}
/** @return configuration to write new reftables with. */
/**
* Get configuration to write new reftables with.
*
* @return configuration to write new reftables with.
*/
public ReftableConfig getReftableConfig() {
return new ReftableConfig(getRepository().getConfig());
}
/** @return the lock protecting this instance's state. */
/**
* Get the lock protecting this instance's state.
*
* @return the lock protecting this instance's state.
*/
protected ReentrantLock getLock() {
return lock;
}
/**
* Whether to compact reftable instead of extending the stack depth.
*
* @return {@code true} if commit of a new small reftable should try to
* replace a prior small reftable by performing a compaction,
* instead of extending the stack depth.
@ -126,7 +140,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
* Obtain a handle to the merged reader.
*
* @return (possibly cached) handle to the merged reader.
* @throws IOException
* @throws java.io.IOException
* if tables cannot be opened.
*/
protected Reftable reader() throws IOException {
@ -145,7 +159,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
* Obtain a handle to the stack of reftables.
*
* @return (possibly cached) handle to the stack.
* @throws IOException
* @throws java.io.IOException
* if tables cannot be opened.
*/
protected ReftableStack stack() throws IOException {
@ -165,6 +179,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
}
}
/** {@inheritDoc} */
@Override
public boolean isNameConflicting(String refName) throws IOException {
lock.lock();
@ -187,6 +202,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
}
}
/** {@inheritDoc} */
@Override
public Ref exactRef(String name) throws IOException {
lock.lock();
@ -202,6 +218,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
}
}
/** {@inheritDoc} */
@Override
public Ref getRef(String needle) throws IOException {
for (String prefix : SEARCH_PATH) {
@ -213,6 +230,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
return null;
}
/** {@inheritDoc} */
@Override
public Map<String, Ref> getRefs(String prefix) throws IOException {
RefList.Builder<Ref> all = new RefList.Builder<>();
@ -236,6 +254,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
return new RefMap(prefix, all.toRefList(), none, none);
}
/** {@inheritDoc} */
@Override
public Ref peel(Ref ref) throws IOException {
Ref oldLeaf = ref.getLeaf();
@ -269,6 +288,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
}
}
/** {@inheritDoc} */
@Override
protected boolean compareAndPut(Ref oldRef, @Nullable Ref newRef)
throws IOException {
@ -340,11 +360,13 @@ public class DfsReftableDatabase extends DfsRefDatabase {
return oldRef != null ? oldRef.getName() : newRef.getName();
}
/** {@inheritDoc} */
@Override
protected boolean compareAndRemove(Ref oldRef) throws IOException {
return compareAndPut(oldRef, null);
}
/** {@inheritDoc} */
@Override
protected RefCache scanAllRefs() throws IOException {
throw new UnsupportedOperationException();
@ -360,6 +382,7 @@ public class DfsReftableDatabase extends DfsRefDatabase {
// Unnecessary; ReftableBatchRefUpdate calls clearCache().
}
/** {@inheritDoc} */
@Override
protected void cachePeeledState(Ref oldLeaf, Ref newLeaf) {
// Do not cache peeled state in reftable.

19
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRepository.java

@ -58,7 +58,9 @@ import org.eclipse.jgit.lib.ReflogReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
/** A Git repository on a DFS. */
/**
* A Git repository on a DFS.
*/
public abstract class DfsRepository extends Repository {
private final DfsConfig config;
@ -76,10 +78,15 @@ public abstract class DfsRepository extends Repository {
this.description = builder.getRepositoryDescription();
}
/** {@inheritDoc} */
@Override
public abstract DfsObjDatabase getObjectDatabase();
/** @return a description of this repository. */
/**
* Get the description of this repository.
*
* @return the description of this repository.
*/
public DfsRepositoryDescription getDescription() {
return description;
}
@ -88,7 +95,7 @@ public abstract class DfsRepository extends Repository {
* Check if the repository already exists.
*
* @return true if the repository exists; false if it is new.
* @throws IOException
* @throws java.io.IOException
* the repository cannot be checked.
*/
public boolean exists() throws IOException {
@ -98,6 +105,7 @@ public abstract class DfsRepository extends Repository {
return true;
}
/** {@inheritDoc} */
@Override
public void create(boolean bare) throws IOException {
if (exists())
@ -110,28 +118,33 @@ public abstract class DfsRepository extends Repository {
throw new IOException(result.name());
}
/** {@inheritDoc} */
@Override
public StoredConfig getConfig() {
return config;
}
/** {@inheritDoc} */
@Override
public void scanForRepoChanges() throws IOException {
getRefDatabase().refresh();
getObjectDatabase().clearCache();
}
/** {@inheritDoc} */
@Override
public void notifyIndexChanged() {
// Do not send notifications.
// There is no index, as there is no working tree.
}
/** {@inheritDoc} */
@Override
public ReflogReader getReflogReader(String refName) throws IOException {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public AttributesNodeProvider createAttributesNodeProvider() {
// TODO Check if the implementation used in FileRepository can be used

29
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRepositoryBuilder.java

@ -50,7 +50,7 @@ import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.BaseRepositoryBuilder;
/**
* Constructs a {@link DfsRepository}.
* Constructs a {@link org.eclipse.jgit.internal.storage.dfs.DfsRepository}.
*
* @param <B>
* type of the builder class.
@ -63,7 +63,11 @@ public abstract class DfsRepositoryBuilder<B extends DfsRepositoryBuilder, R ext
private DfsRepositoryDescription repoDesc;
/** @return options used by readers accessing the repository. */
/**
* Get options used by readers accessing the repository.
*
* @return options used by readers accessing the repository.
*/
public DfsReaderOptions getReaderOptions() {
return readerOptions;
}
@ -80,7 +84,11 @@ public abstract class DfsRepositoryBuilder<B extends DfsRepositoryBuilder, R ext
return self();
}
/** @return a description of the repository. */
/**
* Get the description of the repository.
*
* @return the description of the repository.
*/
public DfsRepositoryDescription getRepositoryDescription() {
return repoDesc;
}
@ -97,6 +105,7 @@ public abstract class DfsRepositoryBuilder<B extends DfsRepositoryBuilder, R ext
return self();
}
/** {@inheritDoc} */
@Override
public B setup() throws IllegalArgumentException, IOException {
super.setup();
@ -108,24 +117,20 @@ public abstract class DfsRepositoryBuilder<B extends DfsRepositoryBuilder, R ext
}
/**
* {@inheritDoc}
* <p>
* Create a repository matching the configuration in this builder.
* <p>
* If an option was not set, the build method will try to default the option
* based on other options. If insufficient information is available, an
* exception is thrown to the caller.
*
* @return a repository matching this configuration.
* @throws IllegalArgumentException
* insufficient parameters were set.
* @throws IOException
* the repository could not be accessed to configure the rest of
* the builder's parameters.
*/
@Override
public abstract R build() throws IOException;
// We don't support local file IO and thus shouldn't permit these to set.
/** {@inheritDoc} */
@Override
public B setGitDir(File gitDir) {
if (gitDir != null)
@ -133,6 +138,7 @@ public abstract class DfsRepositoryBuilder<B extends DfsRepositoryBuilder, R ext
return self();
}
/** {@inheritDoc} */
@Override
public B setObjectDirectory(File objectDirectory) {
if (objectDirectory != null)
@ -140,12 +146,14 @@ public abstract class DfsRepositoryBuilder<B extends DfsRepositoryBuilder, R ext
return self();
}
/** {@inheritDoc} */
@Override
public B addAlternateObjectDirectory(File other) {
throw new UnsupportedOperationException(
JGitText.get().unsupportedAlternates);
}
/** {@inheritDoc} */
@Override
public B setWorkTree(File workTree) {
if (workTree != null)
@ -153,6 +161,7 @@ public abstract class DfsRepositoryBuilder<B extends DfsRepositoryBuilder, R ext
return self();
}
/** {@inheritDoc} */
@Override
public B setIndexFile(File indexFile) {
if (indexFile != null)

17
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsRepositoryDescription.java

@ -43,11 +43,15 @@
package org.eclipse.jgit.internal.storage.dfs;
/** A description of a Git repository on a DFS. */
/**
* A description of a Git repository on a DFS.
*/
public class DfsRepositoryDescription {
private final String repositoryName;
/** Initialize a new, empty repository description. */
/**
* Initialize a new, empty repository description.
*/
public DfsRepositoryDescription() {
this(null);
}
@ -62,11 +66,16 @@ public class DfsRepositoryDescription {
this.repositoryName = repositoryName;
}
/** @return the name of the repository. */
/**
* Get the name of the repository.
*
* @return the name of the repository.
*/
public String getRepositoryName() {
return repositoryName;
}
/** {@inheritDoc} */
@Override
public int hashCode() {
if (getRepositoryName() != null)
@ -74,6 +83,7 @@ public class DfsRepositoryDescription {
return System.identityHashCode(this);
}
/** {@inheritDoc} */
@Override
public boolean equals(Object b) {
if (b instanceof DfsRepositoryDescription){
@ -84,6 +94,7 @@ public class DfsRepositoryDescription {
return false;
}
/** {@inheritDoc} */
@SuppressWarnings("nls")
@Override
public String toString() {

11
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsStreamKey.java

@ -50,9 +50,13 @@ import java.util.Arrays;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.storage.pack.PackExt;
/** Key used by {@link DfsBlockCache} to disambiguate streams. */
/**
* Key used by {@link org.eclipse.jgit.internal.storage.dfs.DfsBlockCache} to disambiguate streams.
*/
public abstract class DfsStreamKey {
/**
 * Create a {@code DfsStreamKey}.
*
* @param repo
* description of the containing repository.
* @param name
@ -71,6 +75,8 @@ public abstract class DfsStreamKey {
final int packExtPos;
/**
* Constructor for DfsStreamKey.
*
* @param hash
* hash of the other identifying components of the key.
* @param ext
@ -83,14 +89,17 @@ public abstract class DfsStreamKey {
this.packExtPos = ext == null ? 0 : ext.getPosition();
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return hash;
}
/** {@inheritDoc} */
@Override
public abstract boolean equals(Object o);
/** {@inheritDoc} */
@SuppressWarnings("boxing")
@Override
public String toString() {

10
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java

@ -46,9 +46,15 @@ package org.eclipse.jgit.internal.storage.dfs;
import org.eclipse.jgit.nls.NLS;
import org.eclipse.jgit.nls.TranslationBundle;
/** Translation bundle for the DFS storage implementation. */
/**
* Translation bundle for the DFS storage implementation.
*/
public class DfsText extends TranslationBundle {
/** @return instance of this translation bundle */
/**
* Get an instance of this translation bundle.
*
* @return instance of this translation bundle.
*/
public static DfsText get() {
return NLS.getBundleFor(DfsText.class);
}

5
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java

@ -56,11 +56,13 @@ public class InMemoryRepository extends DfsRepository {
refdb = new MemRefDatabase();
}
/** {@inheritDoc} */
@Override
public MemObjDatabase getObjectDatabase() {
return objdb;
}
/** {@inheritDoc} */
@Override
public RefDatabase getRefDatabase() {
return refdb;
@ -72,17 +74,20 @@ public class InMemoryRepository extends DfsRepository {
* Useful for testing atomic support enabled or disabled.
*
* @param atomic
* whether to use atomic reference transaction support
*/
public void setPerformsAtomicTransactions(boolean atomic) {
refdb.performsAtomicTransactions = atomic;
}
/** {@inheritDoc} */
@Override
@Nullable
public String getGitwebDescription() {
return gitwebDescription;
}
/** {@inheritDoc} */
@Override
public void setGitwebDescription(@Nullable String d) {
gitwebDescription = d;

5
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java

@ -77,26 +77,31 @@ final class LargePackedWholeObject extends ObjectLoader {
this.db = db;
}
/** {@inheritDoc} */
@Override
public int getType() {
return type;
}
/** {@inheritDoc} */
@Override
public long getSize() {
return size;
}
/** {@inheritDoc} */
@Override
public boolean isLarge() {
return true;
}
/** {@inheritDoc} */
@Override
public byte[] getCachedBytes() throws LargeObjectException {
throw new LargeObjectException();
}
/** {@inheritDoc} */
@Override
public ObjectStream openStream() throws MissingObjectException, IOException {
DfsReader ctx = db.newReader();

3
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackInputStream.java

@ -64,6 +64,7 @@ final class PackInputStream extends InputStream {
ctx.pin(pack, pos);
}
/** {@inheritDoc} */
@Override
public int read(byte[] b, int off, int len) throws IOException {
int n = ctx.copy(pack, pos, b, off, len);
@ -71,6 +72,7 @@ final class PackInputStream extends InputStream {
return n;
}
/** {@inheritDoc} */
@Override
public int read() throws IOException {
byte[] buf = new byte[1];
@ -78,6 +80,7 @@ final class PackInputStream extends InputStream {
return n == 1 ? buf[0] & 0xff : -1;
}
/** {@inheritDoc} */
@Override
public void close() {
ctx.close();

19
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReadableChannel.java

@ -46,13 +46,15 @@ package org.eclipse.jgit.internal.storage.dfs;
import java.io.IOException;
import java.nio.channels.ReadableByteChannel;
/** Readable random access byte channel from a file. */
/**
* Readable random access byte channel from a file.
*/
public interface ReadableChannel extends ReadableByteChannel {
/**
* Get the current position of the channel.
*
 * @return current offset.
* @throws IOException
* @throws java.io.IOException
* the channel's current position cannot be obtained.
*/
public long position() throws IOException;
@ -63,7 +65,7 @@ public interface ReadableChannel extends ReadableByteChannel {
* @param newPosition
* position to move the channel to. The next read will start from
* here. This should be a multiple of the {@link #blockSize()}.
* @throws IOException
* @throws java.io.IOException
* the position cannot be updated. This may be because the
* channel only supports block aligned IO and the current
* position is not block aligned.
@ -78,7 +80,7 @@ public interface ReadableChannel extends ReadableByteChannel {
* read has been completed, the underlying file size should be available.
*
 * @return total size of the channel; -1 if not yet available.
* @throws IOException
* @throws java.io.IOException
* the size cannot be determined.
*/
public long size() throws IOException;
@ -92,9 +94,10 @@ public interface ReadableChannel extends ReadableByteChannel {
* <p>
* Channels should not recommend large block sizes. Sizes up to 1-4 MiB may
* be reasonable, but sizes above that may be horribly inefficient. The
* {@link DfsBlockCache} favors the alignment suggested by the channel
* rather than the configured size under the assumption that reads are very
* expensive and the channel knows what size is best to access it with.
* {@link org.eclipse.jgit.internal.storage.dfs.DfsBlockCache} favors the
* alignment suggested by the channel rather than the configured size under
* the assumption that reads are very expensive and the channel knows what
* size is best to access it with.
*
* @return recommended alignment size for randomly positioned reads. Does
* not need to be a power of 2.
@ -125,7 +128,7 @@ public interface ReadableChannel extends ReadableByteChannel {
*
* @param bufferSize
* requested size of the read ahead buffer, in bytes.
* @throws IOException
* @throws java.io.IOException
* if the read ahead cannot be adjusted.
*/
public void setReadAheadBytes(int bufferSize) throws IOException;

6
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java

@ -90,7 +90,10 @@ import org.eclipse.jgit.revwalk.RevTag;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.ReceiveCommand;
/** {@link BatchRefUpdate} for {@link DfsReftableDatabase}. */
/**
* {@link org.eclipse.jgit.lib.BatchRefUpdate} for
* {@link org.eclipse.jgit.internal.storage.dfs.DfsReftableDatabase}.
*/
public class ReftableBatchRefUpdate extends BatchRefUpdate {
private static final int AVG_BYTES = 36;
@ -119,6 +122,7 @@ public class ReftableBatchRefUpdate extends BatchRefUpdate {
reftableConfig = refdb.getReftableConfig();
}
/** {@inheritDoc} */
@Override
public void execute(RevWalk rw, ProgressMonitor pm, List<String> options) {
List<ReceiveCommand> pending = getPending();

12
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableStack.java

@ -50,7 +50,10 @@ import java.util.List;
import org.eclipse.jgit.internal.storage.reftable.Reftable;
/** Tracks multiple open {@link Reftable} instances. */
/**
* Tracks multiple open
* {@link org.eclipse.jgit.internal.storage.reftable.Reftable} instances.
*/
public class ReftableStack implements AutoCloseable {
/**
* Opens a stack of tables for reading.
@ -61,7 +64,7 @@ public class ReftableStack implements AutoCloseable {
* @param files
* the tables to open.
* @return stack reference to close the tables.
* @throws IOException
* @throws java.io.IOException
* a table could not be opened
*/
public static ReftableStack open(DfsReader ctx, List<DfsReftable> files)
@ -91,6 +94,8 @@ public class ReftableStack implements AutoCloseable {
}
/**
 * Get unmodifiable list of DfsReftable files
 *
 * @return unmodifiable list of DfsReftable files, in the same order the
 *         files were passed to {@link #open(DfsReader, List)}.
*/
@ -99,6 +104,8 @@ public class ReftableStack implements AutoCloseable {
}
/**
* Get unmodifiable list of tables
*
* @return unmodifiable list of tables, in the same order the files were
* passed to {@link #open(DfsReader, List)}.
*/
@ -106,6 +113,7 @@ public class ReftableStack implements AutoCloseable {
return Collections.unmodifiableList(tables);
}
/** {@inheritDoc} */
@Override
public void close() {
for (Reftable t : tables) {

Loading…
Cancel
Save