
Merge branch 'stable-4.9'

* stable-4.9:
  PackInserter: Implement newReader()
  Move some strings from DfsText to JGitText
  FileRepository: Add pack-based inserter implementation
  ObjectDirectory: Factor a method to close open pack handles
  ObjectDirectory: Remove last modified check in insertPack

Change-Id: Ifc9ed6f5d8336bc978818a64eae122bceb933e5d
Branch: stable-4.10
David Pursehouse, 7 years ago · commit 651e17baca
9 changed files (lines changed · path):

  539  org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackInserterTest.java
    2  org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
    2  org.eclipse.jgit/resources/org/eclipse/jgit/internal/storage/dfs/DfsText.properties
    2  org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
   10  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
    2  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java
   11  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
   27  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java
  639  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java
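
Taken together, these changes give the file backend an inserter that buffers new
objects into a single pack per flush() instead of writing loose objects. A rough
usage sketch, based on the API exercised by PackInserterTest below (assumes
same-package access, since PackInserter is package-private at this point, and an
existing FileRepository named repo):

    ObjectDirectory odb = repo.getObjectDatabase();
    try (PackInserter ins = odb.newPackInserter()) {
        ObjectId blobId = ins.insert(Constants.OBJ_BLOB, Constants.encode("foo contents"));
        try (ObjectReader reader = ins.newReader()) {
            reader.open(blobId); // readable here, even before flush()
        }
        ins.flush(); // writes exactly one pack (plus .idx) into objects/pack
    }

Closing the inserter without calling flush() rolls back the temporary pack, so
nothing becomes visible to other readers of the repository.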

539  org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackInserterTest.java

@@ -0,0 +1,539 @@
/*
* Copyright (C) 2017, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.internal.storage.file;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.toList;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
import static org.eclipse.jgit.lib.Constants.OBJ_COMMIT;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.storage.file.WindowCacheConfig;
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import org.eclipse.jgit.util.IO;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@SuppressWarnings("boxing")
public class PackInserterTest extends RepositoryTestCase {
private WindowCacheConfig origWindowCacheConfig;
@Before
public void setWindowCacheConfig() {
origWindowCacheConfig = new WindowCacheConfig();
origWindowCacheConfig.install();
}
@After
public void resetWindowCacheConfig() {
origWindowCacheConfig.install();
}
@Before
public void emptyAtSetUp() throws Exception {
assertEquals(0, listPacks().size());
assertNoObjects();
}
@Test
public void noFlush() throws Exception {
try (PackInserter ins = newInserter()) {
ins.insert(OBJ_BLOB, Constants.encode("foo contents"));
// No flush.
}
assertNoObjects();
}
@Test
public void flushEmptyPack() throws Exception {
try (PackInserter ins = newInserter()) {
ins.flush();
}
assertNoObjects();
}
@Test
public void singlePack() throws Exception {
ObjectId blobId;
byte[] blob = Constants.encode("foo contents");
ObjectId treeId;
ObjectId commitId;
byte[] commit;
try (PackInserter ins = newInserter()) {
blobId = ins.insert(OBJ_BLOB, blob);
DirCache dc = DirCache.newInCore();
DirCacheBuilder b = dc.builder();
DirCacheEntry dce = new DirCacheEntry("foo");
dce.setFileMode(FileMode.REGULAR_FILE);
dce.setObjectId(blobId);
b.add(dce);
b.finish();
treeId = dc.writeTree(ins);
CommitBuilder cb = new CommitBuilder();
cb.setTreeId(treeId);
cb.setAuthor(author);
cb.setCommitter(committer);
cb.setMessage("Commit message");
commit = cb.toByteArray();
commitId = ins.insert(cb);
ins.flush();
}
assertPacksOnly();
List<PackFile> packs = listPacks();
assertEquals(1, packs.size());
assertEquals(3, packs.get(0).getObjectCount());
try (ObjectReader reader = db.newObjectReader()) {
assertBlob(reader, blobId, blob);
CanonicalTreeParser treeParser =
new CanonicalTreeParser(null, reader, treeId);
assertEquals("foo", treeParser.getEntryPathString());
assertEquals(blobId, treeParser.getEntryObjectId());
ObjectLoader commitLoader = reader.open(commitId);
assertEquals(OBJ_COMMIT, commitLoader.getType());
assertArrayEquals(commit, commitLoader.getBytes());
}
}
@Test
public void multiplePacks() throws Exception {
ObjectId blobId1;
ObjectId blobId2;
byte[] blob1 = Constants.encode("blob1");
byte[] blob2 = Constants.encode("blob2");
try (PackInserter ins = newInserter()) {
blobId1 = ins.insert(OBJ_BLOB, blob1);
ins.flush();
blobId2 = ins.insert(OBJ_BLOB, blob2);
ins.flush();
}
assertPacksOnly();
List<PackFile> packs = listPacks();
assertEquals(2, packs.size());
assertEquals(1, packs.get(0).getObjectCount());
assertEquals(1, packs.get(1).getObjectCount());
try (ObjectReader reader = db.newObjectReader()) {
assertBlob(reader, blobId1, blob1);
assertBlob(reader, blobId2, blob2);
}
}
@Test
public void largeBlob() throws Exception {
ObjectId blobId;
byte[] blob = newLargeBlob();
try (PackInserter ins = newInserter()) {
assertThat(blob.length, greaterThan(ins.getBufferSize()));
blobId =
ins.insert(OBJ_BLOB, blob.length, new ByteArrayInputStream(blob));
ins.flush();
}
assertPacksOnly();
Collection<PackFile> packs = listPacks();
assertEquals(1, packs.size());
PackFile p = packs.iterator().next();
assertEquals(1, p.getObjectCount());
try (ObjectReader reader = db.newObjectReader()) {
assertBlob(reader, blobId, blob);
}
}
@Test
public void overwriteExistingPack() throws Exception {
ObjectId blobId;
byte[] blob = Constants.encode("foo contents");
try (PackInserter ins = newInserter()) {
blobId = ins.insert(OBJ_BLOB, blob);
ins.flush();
}
assertPacksOnly();
List<PackFile> packs = listPacks();
assertEquals(1, packs.size());
PackFile pack = packs.get(0);
assertEquals(1, pack.getObjectCount());
String inode = getInode(pack.getPackFile());
try (PackInserter ins = newInserter()) {
ins.checkExisting(false);
assertEquals(blobId, ins.insert(OBJ_BLOB, blob));
ins.flush();
}
assertPacksOnly();
packs = listPacks();
assertEquals(1, packs.size());
pack = packs.get(0);
assertEquals(1, pack.getObjectCount());
if (inode != null) {
// Old file was overwritten with new file, although objects were
// equivalent.
assertNotEquals(inode, getInode(pack.getPackFile()));
}
}
@Test
public void checkExisting() throws Exception {
ObjectId blobId;
byte[] blob = Constants.encode("foo contents");
try (PackInserter ins = newInserter()) {
blobId = ins.insert(OBJ_BLOB, blob);
ins.insert(OBJ_BLOB, Constants.encode("another blob"));
ins.flush();
}
assertPacksOnly();
assertEquals(1, listPacks().size());
try (PackInserter ins = newInserter()) {
assertEquals(blobId, ins.insert(OBJ_BLOB, blob));
ins.flush();
}
assertPacksOnly();
assertEquals(1, listPacks().size());
try (PackInserter ins = newInserter()) {
ins.checkExisting(false);
assertEquals(blobId, ins.insert(OBJ_BLOB, blob));
ins.flush();
}
assertPacksOnly();
assertEquals(2, listPacks().size());
try (ObjectReader reader = db.newObjectReader()) {
assertBlob(reader, blobId, blob);
}
}
@Test
public void insertSmallInputStreamRespectsCheckExisting() throws Exception {
ObjectId blobId;
byte[] blob = Constants.encode("foo contents");
try (PackInserter ins = newInserter()) {
assertThat(blob.length, lessThan(ins.getBufferSize()));
blobId = ins.insert(OBJ_BLOB, blob);
ins.insert(OBJ_BLOB, Constants.encode("another blob"));
ins.flush();
}
assertPacksOnly();
assertEquals(1, listPacks().size());
try (PackInserter ins = newInserter()) {
assertEquals(blobId,
ins.insert(OBJ_BLOB, blob.length, new ByteArrayInputStream(blob)));
ins.flush();
}
assertPacksOnly();
assertEquals(1, listPacks().size());
}
@Test
public void insertLargeInputStreamBypassesCheckExisting() throws Exception {
ObjectId blobId;
byte[] blob = newLargeBlob();
try (PackInserter ins = newInserter()) {
assertThat(blob.length, greaterThan(ins.getBufferSize()));
blobId = ins.insert(OBJ_BLOB, blob);
ins.insert(OBJ_BLOB, Constants.encode("another blob"));
ins.flush();
}
assertPacksOnly();
assertEquals(1, listPacks().size());
try (PackInserter ins = newInserter()) {
assertEquals(blobId,
ins.insert(OBJ_BLOB, blob.length, new ByteArrayInputStream(blob)));
ins.flush();
}
assertPacksOnly();
assertEquals(2, listPacks().size());
}
@Test
public void readBackSmallFiles() throws Exception {
ObjectId blobId1;
ObjectId blobId2;
ObjectId blobId3;
byte[] blob1 = Constants.encode("blob1");
byte[] blob2 = Constants.encode("blob2");
byte[] blob3 = Constants.encode("blob3");
try (PackInserter ins = newInserter()) {
assertThat(blob1.length, lessThan(ins.getBufferSize()));
blobId1 = ins.insert(OBJ_BLOB, blob1);
try (ObjectReader reader = ins.newReader()) {
assertBlob(reader, blobId1, blob1);
}
// Read-back should not mess up the file pointer.
blobId2 = ins.insert(OBJ_BLOB, blob2);
ins.flush();
blobId3 = ins.insert(OBJ_BLOB, blob3);
}
assertPacksOnly();
List<PackFile> packs = listPacks();
assertEquals(1, packs.size());
assertEquals(2, packs.get(0).getObjectCount());
try (ObjectReader reader = db.newObjectReader()) {
assertBlob(reader, blobId1, blob1);
assertBlob(reader, blobId2, blob2);
try {
reader.open(blobId3);
fail("Expected MissingObjectException");
} catch (MissingObjectException expected) {
// Expected.
}
}
}
@Test
public void readBackLargeFile() throws Exception {
ObjectId blobId;
byte[] blob = newLargeBlob();
WindowCacheConfig wcc = new WindowCacheConfig();
wcc.setStreamFileThreshold(1024);
wcc.install();
try (ObjectReader reader = db.newObjectReader()) {
assertThat(blob.length, greaterThan(reader.getStreamFileThreshold()));
}
try (PackInserter ins = newInserter()) {
blobId = ins.insert(OBJ_BLOB, blob);
try (ObjectReader reader = ins.newReader()) {
// Double-check threshold is propagated.
assertThat(blob.length, greaterThan(reader.getStreamFileThreshold()));
assertBlob(reader, blobId, blob);
}
}
assertPacksOnly();
// Pack was streamed out to disk and read back from the temp file, but
// ultimately rolled back and deleted.
assertEquals(0, listPacks().size());
try (ObjectReader reader = db.newObjectReader()) {
try {
reader.open(blobId);
fail("Expected MissingObjectException");
} catch (MissingObjectException expected) {
// Expected.
}
}
}
@Test
public void readBackFallsBackToRepo() throws Exception {
ObjectId blobId;
byte[] blob = Constants.encode("foo contents");
try (PackInserter ins = newInserter()) {
assertThat(blob.length, lessThan(ins.getBufferSize()));
blobId = ins.insert(OBJ_BLOB, blob);
ins.flush();
}
try (PackInserter ins = newInserter();
ObjectReader reader = ins.newReader()) {
assertBlob(reader, blobId, blob);
}
}
private List<PackFile> listPacks() throws Exception {
List<PackFile> fromOpenDb = listPacks(db);
List<PackFile> reopened;
try (FileRepository db2 = new FileRepository(db.getDirectory())) {
reopened = listPacks(db2);
}
assertEquals(fromOpenDb.size(), reopened.size());
for (int i = 0; i < fromOpenDb.size(); i++) {
PackFile a = fromOpenDb.get(i);
PackFile b = reopened.get(i);
assertEquals(a.getPackName(), b.getPackName());
assertEquals(
a.getPackFile().getAbsolutePath(), b.getPackFile().getAbsolutePath());
assertEquals(a.getObjectCount(), b.getObjectCount());
}
return fromOpenDb;
}
private static List<PackFile> listPacks(FileRepository db) throws Exception {
return db.getObjectDatabase().getPacks().stream()
.sorted(comparing(PackFile::getPackName)).collect(toList());
}
private PackInserter newInserter() {
return db.getObjectDatabase().newPackInserter();
}
private static byte[] newLargeBlob() {
byte[] blob = new byte[10240];
for (int i = 0; i < blob.length; i++) {
blob[i] = (byte) ('0' + (i % 10));
}
return blob;
}
private static String getInode(File f) throws Exception {
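// fileKey() has no specified string format; on typical POSIX JDKs it
// renders as "(dev=...,ino=...)", which the regex below depends on.
// Callers treat a null return as "inode unavailable" and skip the check.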
BasicFileAttributes attrs = Files.readAttributes(
f.toPath(), BasicFileAttributes.class);
Object k = attrs.fileKey();
if (k == null) {
return null;
}
Pattern p = Pattern.compile("^\\(dev=[^,]*,ino=(\\d+)\\)$");
Matcher m = p.matcher(k.toString());
return m.matches() ? m.group(1) : null;
}
private static void assertBlob(ObjectReader reader, ObjectId id,
byte[] expected) throws Exception {
ObjectLoader loader = reader.open(id);
assertEquals(OBJ_BLOB, loader.getType());
assertEquals(expected.length, loader.getSize());
try (ObjectStream s = loader.openStream()) {
int n = (int) s.getSize();
byte[] actual = new byte[n];
assertEquals(n, IO.readFully(s, actual, 0));
assertArrayEquals(expected, actual);
}
}
private void assertPacksOnly() throws Exception {
new BadFileCollector(f -> !f.endsWith(".pack") && !f.endsWith(".idx"))
.assertNoBadFiles(db.getObjectDatabase().getDirectory());
}
private void assertNoObjects() throws Exception {
new BadFileCollector(f -> true)
.assertNoBadFiles(db.getObjectDatabase().getDirectory());
}
private static class BadFileCollector extends SimpleFileVisitor<Path> {
private final Predicate<String> badName;
private List<String> bad;
BadFileCollector(Predicate<String> badName) {
this.badName = badName;
}
void assertNoBadFiles(File f) throws IOException {
bad = new ArrayList<>();
Files.walkFileTree(f.toPath(), this);
if (!bad.isEmpty()) {
fail("unexpected files in object directory: " + bad);
}
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
String name = file.getFileName().toString();
if (!attrs.isDirectory() && badName.test(name)) {
bad.add(name);
}
return FileVisitResult.CONTINUE;
}
}
}

2  org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties

@@ -90,6 +90,7 @@ cannotParseDate=The date specification "{0}" could not be parsed with the follow
 cannotParseGitURIish=Cannot parse Git URI-ish
 cannotPullOnARepoWithState=Cannot pull into a repository with state: {0}
 cannotRead=Cannot read {0}
+cannotReadBackDelta=Cannot read delta type {0}
 cannotReadBlob=Cannot read blob {0}
 cannotReadCommit=Cannot read commit {0}
 cannotReadFile=Cannot read file {0}
@@ -687,6 +688,7 @@ unencodeableFile=Unencodable file: {0}
 unexpectedCompareResult=Unexpected metadata comparison result: {0}
 unexpectedEndOfConfigFile=Unexpected end of config file
 unexpectedEndOfInput=Unexpected end of input
+unexpectedEofInPack=Unexpected EOF in partially created pack
 unexpectedHunkTrailer=Unexpected hunk trailer
 unexpectedOddResult=odd: {0} + {1} - {2}
 unexpectedRefReport={0}: unexpected ref report: {1}

2  org.eclipse.jgit/resources/org/eclipse/jgit/internal/storage/dfs/DfsText.properties

@@ -1,6 +1,4 @@
 cannotReadIndex=Cannot read index {0}
-cannotReadBackDelta=Cannot read delta type {0}
 shortReadOfBlock=Short read of block at {0} in pack {1}; expected {2} bytes, received only {3}
 shortReadOfIndex=Short read of index {0}
-unexpectedEofInPack=Unexpected EOF in partially created pack
 willNotStoreEmptyPack=Cannot store empty pack

2  org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java

@@ -149,6 +149,7 @@ public class JGitText extends TranslationBundle {
 	/***/ public String cannotParseGitURIish;
 	/***/ public String cannotPullOnARepoWithState;
 	/***/ public String cannotRead;
+	/***/ public String cannotReadBackDelta;
 	/***/ public String cannotReadBlob;
 	/***/ public String cannotReadCommit;
 	/***/ public String cannotReadFile;
@@ -746,6 +747,7 @@ public class JGitText extends TranslationBundle {
 	/***/ public String unexpectedCompareResult;
 	/***/ public String unexpectedEndOfConfigFile;
 	/***/ public String unexpectedEndOfInput;
+	/***/ public String unexpectedEofInPack;
 	/***/ public String unexpectedHunkTrailer;
 	/***/ public String unexpectedOddResult;
 	/***/ public String unexpectedRefReport;

10  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java

@@ -500,7 +500,7 @@ public class DfsInserter extends ObjectInserter {
 			inf.setInput(currBuf, s, n);
 			return n;
 		}
-		throw new EOFException(DfsText.get().unexpectedEofInPack);
+		throw new EOFException(JGitText.get().unexpectedEofInPack);
 	}
 
 	private DfsBlock getOrLoadBlock(long pos) throws IOException {
@@ -513,7 +513,7 @@ public class DfsInserter extends ObjectInserter {
 		for (int p = 0; p < blockSize;) {
 			int n = out.read(s + p, ByteBuffer.wrap(d, p, blockSize - p));
 			if (n <= 0)
-				throw new EOFException(DfsText.get().unexpectedEofInPack);
+				throw new EOFException(JGitText.get().unexpectedEofInPack);
 			p += n;
 		}
 		b = new DfsBlock(packKey, s, d);
@@ -569,13 +569,13 @@ public class DfsInserter extends ObjectInserter {
 			byte[] buf = buffer();
 			int cnt = packOut.read(obj.getOffset(), buf, 0, 20);
 			if (cnt <= 0)
-				throw new EOFException(DfsText.get().unexpectedEofInPack);
+				throw new EOFException(JGitText.get().unexpectedEofInPack);
 			int c = buf[0] & 0xff;
 			int type = (c >> 4) & 7;
 			if (type == OBJ_OFS_DELTA || type == OBJ_REF_DELTA)
 				throw new IOException(MessageFormat.format(
-						DfsText.get().cannotReadBackDelta, Integer.toString(type)));
+						JGitText.get().cannotReadBackDelta, Integer.toString(type)));
 			if (typeHint != OBJ_ANY && type != typeHint) {
 				throw new IncorrectObjectTypeException(objectId.copy(), typeHint);
 			}
@@ -585,7 +585,7 @@ public class DfsInserter extends ObjectInserter {
 			int shift = 4;
 			while ((c & 0x80) != 0) {
 				if (ptr >= cnt)
-					throw new EOFException(DfsText.get().unexpectedEofInPack);
+					throw new EOFException(JGitText.get().unexpectedEofInPack);
 				c = buf[ptr++] & 0xff;
 				sz += ((long) (c & 0x7f)) << shift;
 				shift += 7;

2  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java

@@ -55,9 +55,7 @@ public class DfsText extends TranslationBundle {
 	// @formatter:off
 	/***/ public String cannotReadIndex;
-	/***/ public String cannotReadBackDelta;
 	/***/ public String shortReadOfBlock;
 	/***/ public String shortReadOfIndex;
-	/***/ public String unexpectedEofInPack;
 	/***/ public String willNotStoreEmptyPack;
 }

11  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java

@@ -1173,16 +1173,7 @@ public class GC {
 		// rename the temporary files to real files
 		File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
 
-		// if the packfile already exists (because we are rewriting a
-		// packfile for the same set of objects maybe with different
-		// PackConfig) then make sure we get rid of all handles on the file.
-		// Windows will not allow for rename otherwise.
-		if (realPack.exists())
-			for (PackFile p : repo.getObjectDatabase().getPacks())
-				if (realPack.getPath().equals(p.getPackFile().getPath())) {
-					p.close();
-					break;
-				}
+		repo.getObjectDatabase().closeAllPackHandles(realPack);
 
 		tmpPack.setReadOnly();
 		FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);

27  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java

@@ -220,6 +220,16 @@ public class ObjectDirectory extends FileObjectDatabase {
 		return new ObjectDirectoryInserter(this, config);
 	}
 
+	/**
+	 * Create a new inserter that inserts all objects as pack files, not loose
+	 * objects.
+	 *
+	 * @return new inserter.
+	 */
+	public PackInserter newPackInserter() {
+		return new PackInserter(this);
+	}
+
 	@Override
 	public void close() {
 		unpackedObjectCache.clear();
@@ -814,8 +824,6 @@ public class ObjectDirectory extends FileObjectDatabase {
 		final PackFile[] oldList = o.packs;
 		final String name = pf.getPackFile().getName();
 		for (PackFile p : oldList) {
-			if (PackFile.SORT.compare(pf, p) < 0)
-				break;
 			if (name.equals(p.getPackFile().getName()))
 				return;
 		}
@@ -971,6 +979,21 @@ public class ObjectDirectory extends FileObjectDatabase {
 		return nameSet;
 	}
 
+	void closeAllPackHandles(File packFile) {
+		// if the packfile already exists (because we are rewriting a
+		// packfile for the same set of objects maybe with different
+		// PackConfig) then make sure we get rid of all handles on the file.
+		// Windows will not allow for rename otherwise.
+		if (packFile.exists()) {
+			for (PackFile p : getPacks()) {
+				if (packFile.getPath().equals(p.getPackFile().getPath())) {
+					p.close();
+					break;
+				}
+			}
+		}
+	}
+
 	AlternateHandle[] myAlternates() {
 		AlternateHandle[] alt = alternates.get();
 		if (alt == null) {

639  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java

@@ -0,0 +1,639 @@
/*
* Copyright (C) 2017, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.internal.storage.file;
import static java.nio.file.StandardCopyOption.ATOMIC_MOVE;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
import static org.eclipse.jgit.lib.Constants.OBJ_OFS_DELTA;
import static org.eclipse.jgit.lib.Constants.OBJ_REF_DELTA;
import java.io.BufferedInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.nio.channels.Channels;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.zip.CRC32;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.InflaterCache;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdOwnerMap;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.transport.PackParser;
import org.eclipse.jgit.transport.PackedObjectInfo;
import org.eclipse.jgit.util.BlockList;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.NB;
import org.eclipse.jgit.util.io.CountingOutputStream;
import org.eclipse.jgit.util.sha1.SHA1;
/**
* Object inserter that inserts one pack per call to {@link #flush()}, and never
* inserts loose objects.
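* <p>
* Objects written by this inserter may be read back through the reader
* returned by {@link #newReader()} before flushing; until {@link #flush()}
* completes they are invisible to readers created on the repository, and
* closing without flushing discards the temporary pack.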
*/
class PackInserter extends ObjectInserter {
/** Always produce version 2 indexes, to get CRC data. */
private static final int INDEX_VERSION = 2;
private final ObjectDirectory db;
private List<PackedObjectInfo> objectList;
private ObjectIdOwnerMap<PackedObjectInfo> objectMap;
private boolean rollback;
private boolean checkExisting = true;
private int compression = Deflater.BEST_COMPRESSION;
private File tmpPack;
private PackStream packOut;
private Inflater cachedInflater;
PackInserter(ObjectDirectory db) {
this.db = db;
}
/**
* @param check
* if false, will write out possibly-duplicate objects without
* first checking whether they exist in the repo; default is true.
*/
public void checkExisting(boolean check) {
checkExisting = check;
}
/**
* @param compression
* compression level for zlib deflater.
*/
public void setCompressionLevel(int compression) {
this.compression = compression;
}
int getBufferSize() {
return buffer().length;
}
@Override
public ObjectId insert(int type, byte[] data, int off, int len)
throws IOException {
ObjectId id = idFor(type, data, off, len);
if (objectMap != null && objectMap.contains(id)) {
return id;
}
// Ignore loose objects, which are potentially unreachable.
if (checkExisting && db.hasPackedObject(id)) {
return id;
}
long offset = beginObject(type, len);
packOut.compress.write(data, off, len);
packOut.compress.finish();
return endObject(id, offset);
}
@Override
public ObjectId insert(int type, long len, InputStream in)
throws IOException {
byte[] buf = buffer();
if (len <= buf.length) {
IO.readFully(in, buf, 0, (int) len);
return insert(type, buf, 0, (int) len);
}
long offset = beginObject(type, len);
SHA1 md = digest();
md.update(Constants.encodedTypeString(type));
md.update((byte) ' ');
md.update(Constants.encodeASCII(len));
md.update((byte) 0);
while (0 < len) {
int n = in.read(buf, 0, (int) Math.min(buf.length, len));
if (n <= 0) {
throw new EOFException();
}
md.update(buf, 0, n);
packOut.compress.write(buf, 0, n);
len -= n;
}
packOut.compress.finish();
return endObject(md.toObjectId(), offset);
}
private long beginObject(int type, long len) throws IOException {
if (packOut == null) {
beginPack();
}
long offset = packOut.getOffset();
packOut.beginObject(type, len);
return offset;
}
private ObjectId endObject(ObjectId id, long offset) {
PackedObjectInfo obj = new PackedObjectInfo(id);
obj.setOffset(offset);
obj.setCRC((int) packOut.crc32.getValue());
objectList.add(obj);
objectMap.addIfAbsent(obj);
return id;
}
private static File idxFor(File packFile) {
String p = packFile.getName();
return new File(
packFile.getParentFile(),
p.substring(0, p.lastIndexOf('.')) + ".idx"); //$NON-NLS-1$
}
private void beginPack() throws IOException {
objectList = new BlockList<>();
objectMap = new ObjectIdOwnerMap<>();
rollback = true;
tmpPack = File.createTempFile("insert_", ".pack", db.getDirectory()); //$NON-NLS-1$ //$NON-NLS-2$
packOut = new PackStream(tmpPack);
// Write the header as though it were a single object pack.
packOut.write(packOut.hdrBuf, 0, writePackHeader(packOut.hdrBuf, 1));
}
private static int writePackHeader(byte[] buf, int objectCount) {
System.arraycopy(Constants.PACK_SIGNATURE, 0, buf, 0, 4);
NB.encodeInt32(buf, 4, 2); // Always use pack version 2.
NB.encodeInt32(buf, 8, objectCount);
return 12;
}
@Override
public PackParser newPackParser(InputStream in) {
throw new UnsupportedOperationException();
}
@Override
public ObjectReader newReader() {
return new Reader();
}
@Override
public void flush() throws IOException {
if (tmpPack == null) {
return;
}
if (packOut == null) {
throw new IOException();
}
byte[] packHash;
try {
packHash = packOut.finishPack();
} finally {
packOut.close();
packOut = null;
}
Collections.sort(objectList);
File tmpIdx = idxFor(tmpPack);
writePackIndex(tmpIdx, packHash, objectList);
File realPack = new File(
new File(db.getDirectory(), "pack"), //$NON-NLS-1$
"pack-" + computeName(objectList).name() + ".pack"); //$NON-NLS-1$ //$NON-NLS-2$
db.closeAllPackHandles(realPack);
tmpPack.setReadOnly();
FileUtils.rename(tmpPack, realPack, ATOMIC_MOVE);
File realIdx = idxFor(realPack);
tmpIdx.setReadOnly();
try {
FileUtils.rename(tmpIdx, realIdx, ATOMIC_MOVE);
} catch (IOException e) {
File newIdx = new File(
realIdx.getParentFile(), realIdx.getName() + ".new"); //$NON-NLS-1$
try {
FileUtils.rename(tmpIdx, newIdx, ATOMIC_MOVE);
} catch (IOException e2) {
newIdx = tmpIdx;
e = e2;
}
throw new IOException(MessageFormat.format(
JGitText.get().panicCantRenameIndexFile, newIdx,
realIdx), e);
}
db.openPack(realPack);
rollback = false;
clear();
}
private static void writePackIndex(File idx, byte[] packHash,
List<PackedObjectInfo> list) throws IOException {
try (OutputStream os = new FileOutputStream(idx)) {
PackIndexWriter w = PackIndexWriter.createVersion(os, INDEX_VERSION);
w.write(list, packHash);
}
}
private ObjectId computeName(List<PackedObjectInfo> list) {
SHA1 md = digest().reset();
byte[] buf = buffer();
for (PackedObjectInfo otp : list) {
otp.copyRawTo(buf, 0);
md.update(buf, 0, OBJECT_ID_LENGTH);
}
return ObjectId.fromRaw(md.digest());
}
@Override
public void close() {
try {
if (packOut != null) {
try {
packOut.close();
} catch (IOException err) {
// Ignore a close failure, the pack should be removed.
}
}
if (rollback && tmpPack != null) {
try {
FileUtils.delete(tmpPack);
} catch (IOException e) {
// Still delete idx.
}
try {
FileUtils.delete(idxFor(tmpPack));
} catch (IOException e) {
// Ignore error deleting temp idx.
}
rollback = false;
}
} finally {
clear();
try {
InflaterCache.release(cachedInflater);
} finally {
cachedInflater = null;
}
}
}
private void clear() {
objectList = null;
objectMap = null;
tmpPack = null;
packOut = null;
}
private Inflater inflater() {
if (cachedInflater == null) {
cachedInflater = InflaterCache.get();
} else {
cachedInflater.reset();
}
return cachedInflater;
}
private class PackStream extends OutputStream {
final byte[] hdrBuf;
final CRC32 crc32;
final DeflaterOutputStream compress;
private final RandomAccessFile file;
private final CountingOutputStream out;
private final Deflater deflater;
PackStream(File pack) throws IOException {
file = new RandomAccessFile(pack, "rw"); //$NON-NLS-1$
out = new CountingOutputStream(new FileOutputStream(file.getFD()));
deflater = new Deflater(compression);
compress = new DeflaterOutputStream(this, deflater, 8192);
hdrBuf = new byte[32];
crc32 = new CRC32();
}
long getOffset() {
return out.getCount();
}
void beginObject(int objectType, long length) throws IOException {
crc32.reset();
deflater.reset();
write(hdrBuf, 0, encodeTypeSize(objectType, length));
}
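// Standard pack object header: the low 4 bits of the first byte carry the
// low bits of the inflated size, bits 4-6 the object type, and the high
// bit flags continuation; remaining size bits follow 7 per byte.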
private int encodeTypeSize(int type, long rawLength) {
long nextLength = rawLength >>> 4;
hdrBuf[0] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (rawLength & 0x0F));
rawLength = nextLength;
int n = 1;
while (rawLength > 0) {
nextLength >>>= 7;
hdrBuf[n++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (rawLength & 0x7F));
rawLength = nextLength;
}
return n;
}
@Override
public void write(final int b) throws IOException {
hdrBuf[0] = (byte) b;
write(hdrBuf, 0, 1);
}
@Override
public void write(byte[] data, int off, int len) throws IOException {
crc32.update(data, off, len);
out.write(data, off, len);
}
byte[] finishPack() throws IOException {
// Overwrite placeholder header with actual object count, then hash.
file.seek(0);
write(hdrBuf, 0, writePackHeader(hdrBuf, objectList.size()));
byte[] buf = buffer();
SHA1 md = digest().reset();
file.seek(0);
while (true) {
int r = file.read(buf);
if (r < 0) {
break;
}
md.update(buf, 0, r);
}
byte[] packHash = md.digest();
out.write(packHash, 0, packHash.length);
return packHash;
}
@Override
public void close() throws IOException {
deflater.end();
out.close();
file.close();
}
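// Inflate one object's compressed bytes directly from the still-open temp
// pack. Returns null when the destination buffer cannot be allocated, so
// the caller can fall back to large-object streaming.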
byte[] inflate(long filePos, int len) throws IOException, DataFormatException {
byte[] dstbuf;
try {
dstbuf = new byte[len];
} catch (OutOfMemoryError noMemory) {
return null; // Caller will switch to large object streaming.
}
byte[] srcbuf = buffer();
Inflater inf = inflater();
filePos += setInput(filePos, inf, srcbuf);
for (int dstoff = 0;;) {
int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
dstoff += n;
if (inf.finished()) {
return dstbuf;
}
if (inf.needsInput()) {
filePos += setInput(filePos, inf, srcbuf);
} else if (n == 0) {
throw new DataFormatException();
}
}
}
private int setInput(long filePos, Inflater inf, byte[] buf)
throws IOException {
if (file.getFilePointer() != filePos) {
file.seek(filePos);
}
int n = file.read(buf);
if (n < 0) {
throw new EOFException(JGitText.get().unexpectedEofInPack);
}
inf.setInput(buf, 0, n);
return n;
}
}
private class Reader extends ObjectReader {
private final ObjectReader ctx;
private Reader() {
ctx = db.newReader();
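// Carry over the repository's streaming threshold so large objects read
// back through this reader are streamed from the temp pack rather than
// fully materialized in memory.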
setStreamFileThreshold(ctx.getStreamFileThreshold());
}
@Override
public ObjectReader newReader() {
return db.newReader();
}
@Override
public ObjectInserter getCreatedFromInserter() {
return PackInserter.this;
}
@Override
public Collection<ObjectId> resolve(AbbreviatedObjectId id)
throws IOException {
Collection<ObjectId> stored = ctx.resolve(id);
if (objectList == null) {
return stored;
}
Set<ObjectId> r = new HashSet<>(stored.size() + 2);
r.addAll(stored);
for (PackedObjectInfo obj : objectList) {
if (id.prefixCompare(obj) == 0) {
r.add(obj.copy());
}
}
return r;
}
@Override
public ObjectLoader open(AnyObjectId objectId, int typeHint)
throws MissingObjectException, IncorrectObjectTypeException,
IOException {
if (objectMap == null) {
return ctx.open(objectId, typeHint);
}
PackedObjectInfo obj = objectMap.get(objectId);
if (obj == null) {
return ctx.open(objectId, typeHint);
}
byte[] buf = buffer();
RandomAccessFile f = packOut.file;
f.seek(obj.getOffset());
int cnt = f.read(buf, 0, 20);
if (cnt <= 0) {
throw new EOFException(JGitText.get().unexpectedEofInPack);
}
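// First header byte: bits 4-6 hold the type, the low 4 bits start the
// size; the size varint continues below. Deltas cannot appear here,
// since this inserter writes only whole objects.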
int c = buf[0] & 0xff;
int type = (c >> 4) & 7;
if (type == OBJ_OFS_DELTA || type == OBJ_REF_DELTA) {
throw new IOException(MessageFormat.format(
JGitText.get().cannotReadBackDelta, Integer.toString(type)));
}
if (typeHint != OBJ_ANY && type != typeHint) {
throw new IncorrectObjectTypeException(objectId.copy(), typeHint);
}
long sz = c & 0x0f;
int ptr = 1;
int shift = 4;
while ((c & 0x80) != 0) {
if (ptr >= cnt) {
throw new EOFException(JGitText.get().unexpectedEofInPack);
}
c = buf[ptr++] & 0xff;
sz += ((long) (c & 0x7f)) << shift;
shift += 7;
}
long zpos = obj.getOffset() + ptr;
if (sz < getStreamFileThreshold()) {
byte[] data = inflate(obj, zpos, (int) sz);
if (data != null) {
return new ObjectLoader.SmallObject(type, data);
}
}
return new StreamLoader(f, type, sz, zpos);
}
private byte[] inflate(PackedObjectInfo obj, long zpos, int sz)
throws IOException, CorruptObjectException {
try {
return packOut.inflate(zpos, sz);
} catch (DataFormatException dfe) {
CorruptObjectException coe = new CorruptObjectException(
MessageFormat.format(
JGitText.get().objectAtHasBadZlibStream,
Long.valueOf(obj.getOffset()),
tmpPack.getAbsolutePath()));
coe.initCause(dfe);
throw coe;
}
}
@Override
public Set<ObjectId> getShallowCommits() throws IOException {
return ctx.getShallowCommits();
}
@Override
public void close() {
ctx.close();
}
private class StreamLoader extends ObjectLoader {
private final RandomAccessFile file;
private final int type;
private final long size;
private final long pos;
StreamLoader(RandomAccessFile file, int type, long size, long pos) {
this.file = file;
this.type = type;
this.size = size;
this.pos = pos;
}
@Override
public ObjectStream openStream()
throws MissingObjectException, IOException {
int bufsz = buffer().length;
file.seek(pos);
return new ObjectStream.Filter(
type, size,
new BufferedInputStream(
new InflaterInputStream(
Channels.newInputStream(packOut.file.getChannel()),
inflater(), bufsz),
bufsz));
}
@Override
public int getType() {
return type;
}
@Override
public long getSize() {
return size;
}
@Override
public byte[] getCachedBytes() throws LargeObjectException {
throw new LargeObjectException.ExceedsLimit(
getStreamFileThreshold(), size);
}
}
}
}