Merge "Fix javadoc of TooLargeObjectInPackException"
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
index 32002fd..17c1835 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
@@ -674,7 +674,7 @@
private boolean isObjectInPack(AnyObjectId id, DfsPackFile pack)
throws IOException {
- try (DfsReader reader = new DfsReader(odb)) {
+ try (DfsReader reader = odb.newReader()) {
return pack.hasObject(reader, id);
}
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java
index c817dc3..9b97eb4 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackWriterTest.java
@@ -711,7 +711,7 @@
}
ObjectWalk ow = walk.toObjectWalkWithSameObjects();
- pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have);
+ pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have, NONE);
String id = pw.computeName().getName();
File packdir = new File(repo.getObjectsDirectory(), "pack");
File packFile = new File(packdir, "pack-" + id + ".pack");
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java
index 20b8c51..d9b58e2 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/pack/GcCommitSelectionTest.java
@@ -69,6 +69,15 @@
@Test
public void testBitmapSpansNoMerges() throws Exception {
+ testBitmapSpansNoMerges(false);
+ }
+
+ @Test
+ public void testBitmapSpansNoMergesWithTags() throws Exception {
+ testBitmapSpansNoMerges(true);
+ }
+
+ private void testBitmapSpansNoMerges(boolean withTags) throws Exception {
/*
* Commit counts -> expected bitmap counts for history without merges.
* The top 100 contiguous commits should always have bitmaps, and the
@@ -89,7 +98,10 @@
assertTrue(nextCommitCount > currentCommits); // programming error
for (int i = currentCommits; i < nextCommitCount; i++) {
String str = "A" + i;
- bb.commit().message(str).add(str, str).create();
+ RevCommit rc = bb.commit().message(str).add(str, str).create();
+ if (withTags) {
+ tr.lightweightTag(str, rc);
+ }
}
currentCommits = nextCommitCount;
@@ -233,7 +245,7 @@
m8, m9);
PackWriterBitmapPreparer preparer = newPeparer(m9, commits);
List<BitmapCommit> selection = new ArrayList<>(
- preparer.selectCommits(commits.size()));
+ preparer.selectCommits(commits.size(), PackWriter.NONE));
// Verify that the output is ordered by the separate "chains"
String[] expected = { m0.name(), m1.name(), m2.name(), m4.name(),
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
index ef0b80c..6fff656 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
@@ -322,6 +322,7 @@
HashEntry e1 = table.get(slot);
DfsBlock v = scan(e1, key, position);
if (v != null) {
+ ctx.stats.blockCacheHit++;
statHit.incrementAndGet();
return v;
}
@@ -334,6 +335,7 @@
if (e2 != e1) {
v = scan(e2, key, position);
if (v != null) {
+ ctx.stats.blockCacheHit++;
statHit.incrementAndGet();
creditSpace(blockSize);
return v;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
index de447de..e9ec7e7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
@@ -53,6 +53,7 @@
import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+import static org.eclipse.jgit.internal.storage.pack.PackWriter.NONE;
import java.io.IOException;
import java.util.ArrayList;
@@ -111,7 +112,8 @@
private List<DfsPackFile> packsBefore;
private List<DfsPackFile> expiredGarbagePacks;
- private Set<ObjectId> allHeads;
+ private Set<ObjectId> allHeadsAndTags;
+ private Set<ObjectId> allTags;
private Set<ObjectId> nonHeads;
private Set<ObjectId> txnHeads;
private Set<ObjectId> tagTargets;
@@ -241,23 +243,36 @@
Collection<Ref> refsBefore = getAllRefs();
readPacksBefore();
- allHeads = new HashSet<>();
+ Set<ObjectId> allHeads = new HashSet<>();
+ allHeadsAndTags = new HashSet<>();
+ allTags = new HashSet<>();
nonHeads = new HashSet<>();
txnHeads = new HashSet<>();
tagTargets = new HashSet<>();
for (Ref ref : refsBefore) {
- if (ref.isSymbolic() || ref.getObjectId() == null)
+ if (ref.isSymbolic() || ref.getObjectId() == null) {
continue;
- if (isHead(ref) || isTag(ref))
+ }
+ if (isHead(ref)) {
allHeads.add(ref.getObjectId());
- else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+ } else if (isTag(ref)) {
+ allTags.add(ref.getObjectId());
+ } else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
txnHeads.add(ref.getObjectId());
- else
+ } else {
nonHeads.add(ref.getObjectId());
- if (ref.getPeeledObjectId() != null)
+ }
+ if (ref.getPeeledObjectId() != null) {
tagTargets.add(ref.getPeeledObjectId());
+ }
}
- tagTargets.addAll(allHeads);
+ // Don't exclude tags that are also branch tips.
+ allTags.removeAll(allHeads);
+ allHeadsAndTags.addAll(allHeads);
+ allHeadsAndTags.addAll(allTags);
+
+ // Hoist all branch tips and tags earlier in the pack file
+ tagTargets.addAll(allHeadsAndTags);
boolean rollback = true;
try {
@@ -413,12 +428,12 @@
}
private void packHeads(ProgressMonitor pm) throws IOException {
- if (allHeads.isEmpty())
+ if (allHeadsAndTags.isEmpty())
return;
try (PackWriter pw = newPackWriter()) {
pw.setTagTargets(tagTargets);
- pw.preparePack(pm, allHeads, PackWriter.NONE);
+ pw.preparePack(pm, allHeadsAndTags, NONE, NONE, allTags);
if (0 < pw.getObjectCount())
writePack(GC, pw, pm,
estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC));
@@ -432,7 +447,7 @@
try (PackWriter pw = newPackWriter()) {
for (ObjectIdSet packedObjs : newPackObj)
pw.excludeObjects(packedObjs);
- pw.preparePack(pm, nonHeads, allHeads);
+ pw.preparePack(pm, nonHeads, allHeadsAndTags);
if (0 < pw.getObjectCount())
writePack(GC_REST, pw, pm,
estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC_REST));
@@ -446,7 +461,7 @@
try (PackWriter pw = newPackWriter()) {
for (ObjectIdSet packedObjs : newPackObj)
pw.excludeObjects(packedObjs);
- pw.preparePack(pm, txnHeads, PackWriter.NONE);
+ pw.preparePack(pm, txnHeads, NONE);
if (0 < pw.getObjectCount())
writePack(GC_TXN, pw, pm, 0 /* unknown pack size */);
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
index fd72756..e65c9fd 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
@@ -530,7 +530,7 @@
}
private class Reader extends ObjectReader {
- private final DfsReader ctx = new DfsReader(db);
+ private final DfsReader ctx = db.newReader();
@Override
public ObjectReader newReader() {
@@ -647,7 +647,7 @@
@Override
public ObjectStream openStream() throws IOException {
- final DfsReader ctx = new DfsReader(db);
+ final DfsReader ctx = db.newReader();
if (srcPack != packKey) {
try {
// Post DfsInserter.flush() use the normal code path.
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
index b1cb72d..32ee6c2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
@@ -170,7 +170,7 @@
}
@Override
- public ObjectReader newReader() {
+ public DfsReader newReader() {
return new DfsReader(this);
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index f15d427..ae2e7e4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -251,6 +251,8 @@
PackIndex idx;
try {
+ ctx.stats.readIdx++;
+ long start = System.nanoTime();
ReadableChannel rc = ctx.db.openFile(packDesc, INDEX);
try {
InputStream in = Channels.newInputStream(rc);
@@ -260,10 +262,11 @@
bs = (wantSize / bs) * bs;
else if (bs <= 0)
bs = wantSize;
- in = new BufferedInputStream(in, bs);
- idx = PackIndex.read(in);
+ idx = PackIndex.read(new BufferedInputStream(in, bs));
+ ctx.stats.readIdxBytes += rc.position();
} finally {
rc.close();
+ ctx.stats.readIdxMicros += elapsedMicros(start);
}
} catch (EOFException e) {
invalid = true;
@@ -286,6 +289,10 @@
}
}
+ private static long elapsedMicros(long start) {
+ return (System.nanoTime() - start) / 1000L;
+ }
+
final boolean isGarbage() {
return packDesc.getPackSource() == UNREACHABLE_GARBAGE;
}
@@ -314,6 +321,8 @@
long size;
PackBitmapIndex idx;
try {
+ ctx.stats.readBitmap++;
+ long start = System.nanoTime();
ReadableChannel rc = ctx.db.openFile(packDesc, BITMAP_INDEX);
try {
InputStream in = Channels.newInputStream(rc);
@@ -329,6 +338,8 @@
} finally {
size = rc.position();
rc.close();
+ ctx.stats.readIdxBytes += size;
+ ctx.stats.readIdxMicros += elapsedMicros(start);
}
} catch (EOFException e) {
IOException e2 = new IOException(MessageFormat.format(
@@ -777,6 +788,8 @@
if (invalid)
throw new PackInvalidException(getPackName());
+ ctx.stats.readBlock++;
+ long start = System.nanoTime();
ReadableChannel rc = ctx.db.openFile(packDesc, PACK);
try {
int size = blockSize(rc);
@@ -803,6 +816,7 @@
byte[] buf = new byte[size];
rc.position(pos);
int cnt = read(rc, ByteBuffer.wrap(buf, 0, size));
+ ctx.stats.readBlockBytes += cnt;
if (cnt != size) {
if (0 <= len) {
throw new EOFException(MessageFormat.format(
@@ -824,10 +838,10 @@
length = len = rc.size();
}
- DfsBlock v = new DfsBlock(key, pos, buf);
- return v;
+ return new DfsBlock(key, pos, buf);
} finally {
rc.close();
+ ctx.stats.readBlockMicros += elapsedMicros(start);
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
index 755b163..d611469 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
@@ -95,7 +95,7 @@
* See the base {@link ObjectReader} documentation for details. Notably, a
* reader is not thread safe.
*/
-public final class DfsReader extends ObjectReader implements ObjectReuseAsIs {
+public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
private static final int MAX_RESOLVE_MATCHES = 256;
/** Temporary buffer large enough for at least one raw object id. */
@@ -104,17 +104,21 @@
/** Database this reader loads objects from. */
final DfsObjDatabase db;
+ final DfsReaderIoStats.Accumulator stats = new DfsReaderIoStats.Accumulator();
+
private Inflater inf;
-
private DfsBlock block;
-
private DeltaBaseCache baseCache;
-
private DfsPackFile last;
-
private boolean avoidUnreachable;
- DfsReader(DfsObjDatabase db) {
+ /**
+	 * Initialize a new DfsReader.
+ *
+ * @param db
+ * parent DfsObjDatabase.
+ */
+ protected DfsReader(DfsObjDatabase db) {
this.db = db;
this.streamFileThreshold = db.getReaderOptions().getStreamFileThreshold();
}
@@ -131,7 +135,7 @@
@Override
public ObjectReader newReader() {
- return new DfsReader(db);
+ return db.newReader();
}
@Override
@@ -170,6 +174,7 @@
PackList packList = db.getPackList();
resolveImpl(packList, id, matches);
if (matches.size() < MAX_RESOLVE_MATCHES && packList.dirty()) {
+ stats.scanPacks++;
resolveImpl(db.scanPacks(packList), id, matches);
}
return matches;
@@ -198,6 +203,7 @@
if (hasImpl(packList, objectId)) {
return true;
} else if (packList.dirty()) {
+ stats.scanPacks++;
return hasImpl(db.scanPacks(packList), objectId);
}
return false;
@@ -234,6 +240,7 @@
return checkType(ldr, objectId, typeHint);
}
if (packList.dirty()) {
+ stats.scanPacks++;
ldr = openImpl(db.scanPacks(packList), objectId);
if (ldr != null) {
return checkType(ldr, objectId, typeHint);
@@ -316,6 +323,7 @@
List<FoundObject<T>> r = new ArrayList<>();
findAllImpl(packList, pending, r);
if (!pending.isEmpty() && packList.dirty()) {
+ stats.scanPacks++;
findAllImpl(db.scanPacks(packList), pending, r);
}
for (T t : pending) {
@@ -452,7 +460,6 @@
final IOException findAllError = error;
return new AsyncObjectSizeQueue<T>() {
private FoundObject<T> cur;
-
private long sz;
@Override
@@ -718,9 +725,10 @@
for (int dstoff = 0;;) {
int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
dstoff += n;
- if (inf.finished() || (headerOnly && dstoff == dstbuf.length))
+ if (inf.finished() || (headerOnly && dstoff == dstbuf.length)) {
+ stats.inflatedBytes += dstoff;
return dstoff;
- if (inf.needsInput()) {
+ } else if (inf.needsInput()) {
pin(pack, position);
position += block.setInput(position, inf);
} else if (n == 0)
@@ -764,6 +772,11 @@
block = null;
}
+ /** @return IO statistics accumulated by this reader. */
+ public DfsReaderIoStats getIoStats() {
+ return new DfsReaderIoStats(stats);
+ }
+
/** Release the current window cursor. */
@Override
public void close() {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java
new file mode 100644
index 0000000..9a174c8
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+/** IO statistics for a {@link DfsReader}. */
+public class DfsReaderIoStats {
+ /** POJO to accumulate IO statistics. */
+ public static class Accumulator {
+ /** Number of times the reader explicitly called scanPacks. */
+ long scanPacks;
+
+ /** Total number of complete pack indexes read into memory. */
+ long readIdx;
+
+ /** Total number of complete bitmap indexes read into memory. */
+ long readBitmap;
+
+ /** Total number of bytes read from indexes. */
+ long readIdxBytes;
+
+ /** Total microseconds spent reading pack or bitmap indexes. */
+ long readIdxMicros;
+
+ /** Total number of block cache hits. */
+ long blockCacheHit;
+
+ /** Total number of discrete blocks read from pack file(s). */
+ long readBlock;
+
+ /** Total number of compressed bytes read as block sized units. */
+ long readBlockBytes;
+
+ /** Total microseconds spent reading {@link #readBlock} blocks. */
+ long readBlockMicros;
+
+ /** Total number of bytes decompressed. */
+ long inflatedBytes;
+
+ Accumulator() {
+ }
+ }
+
+ private final Accumulator stats;
+
+ DfsReaderIoStats(Accumulator stats) {
+ this.stats = stats;
+ }
+
+ /** @return number of times the reader explicitly called scanPacks. */
+ public long getScanPacks() {
+ return stats.scanPacks;
+ }
+
+ /** @return total number of complete pack indexes read into memory. */
+ public long getReadPackIndexCount() {
+ return stats.readIdx;
+ }
+
+ /** @return total number of complete bitmap indexes read into memory. */
+ public long getReadBitmapIndexCount() {
+ return stats.readBitmap;
+ }
+
+ /** @return total number of bytes read from indexes. */
+ public long getReadIndexBytes() {
+ return stats.readIdxBytes;
+ }
+
+ /** @return total microseconds spent reading pack or bitmap indexes. */
+ public long getReadIndexMicros() {
+ return stats.readIdxMicros;
+ }
+
+ /** @return total number of block cache hits. */
+ public long getBlockCacheHits() {
+ return stats.blockCacheHit;
+ }
+
+ /** @return total number of discrete blocks read from pack file(s). */
+ public long getReadBlocksCount() {
+ return stats.readBlock;
+ }
+
+ /** @return total number of compressed bytes read as block sized units. */
+ public long getReadBlocksBytes() {
+ return stats.readBlockBytes;
+ }
+
+ /** @return total microseconds spent reading blocks. */
+ public long getReadBlocksMicros() {
+ return stats.readBlockMicros;
+ }
+
+ /** @return total number of bytes decompressed. */
+ public long getInflatedBytes() {
+ return stats.inflatedBytes;
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java
index 6d40a75..73a93e6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/LargePackedWholeObject.java
@@ -99,7 +99,7 @@
@Override
public ObjectStream openStream() throws MissingObjectException, IOException {
- DfsReader ctx = new DfsReader(db);
+ DfsReader ctx = db.newReader();
InputStream in;
try {
in = new PackInputStream(pack, objectOffset + headerLength, ctx);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
index c68e5f7..de81932 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
@@ -729,7 +729,9 @@
long time = System.currentTimeMillis();
Collection<Ref> refsBefore = getAllRefs();
+ Set<ObjectId> allHeadsAndTags = new HashSet<>();
Set<ObjectId> allHeads = new HashSet<>();
+ Set<ObjectId> allTags = new HashSet<>();
Set<ObjectId> nonHeads = new HashSet<>();
Set<ObjectId> txnHeads = new HashSet<>();
Set<ObjectId> tagTargets = new HashSet<>();
@@ -739,16 +741,21 @@
for (Ref ref : refsBefore) {
checkCancelled();
nonHeads.addAll(listRefLogObjects(ref, 0));
- if (ref.isSymbolic() || ref.getObjectId() == null)
+ if (ref.isSymbolic() || ref.getObjectId() == null) {
continue;
- if (isHead(ref) || isTag(ref))
+ }
+ if (isHead(ref)) {
allHeads.add(ref.getObjectId());
- else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+ } else if (isTag(ref)) {
+ allTags.add(ref.getObjectId());
+ } else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
txnHeads.add(ref.getObjectId());
- else
+ } else {
nonHeads.add(ref.getObjectId());
- if (ref.getPeeledObjectId() != null)
+ }
+ if (ref.getPeeledObjectId() != null) {
tagTargets.add(ref.getPeeledObjectId());
+ }
}
List<ObjectIdSet> excluded = new LinkedList<>();
@@ -758,13 +765,19 @@
excluded.add(f.getIndex());
}
- tagTargets.addAll(allHeads);
+		// Don't exclude tags that are also branch tips.
+ allTags.removeAll(allHeads);
+ allHeadsAndTags.addAll(allHeads);
+ allHeadsAndTags.addAll(allTags);
+
+		// Hoist all branch tips and tags earlier in the pack file.
+ tagTargets.addAll(allHeadsAndTags);
nonHeads.addAll(indexObjects);
List<PackFile> ret = new ArrayList<>(2);
PackFile heads = null;
- if (!allHeads.isEmpty()) {
- heads = writePack(allHeads, Collections.<ObjectId> emptySet(),
+ if (!allHeadsAndTags.isEmpty()) {
+ heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
tagTargets, excluded);
if (heads != null) {
ret.add(heads);
@@ -772,12 +785,14 @@
}
}
if (!nonHeads.isEmpty()) {
- PackFile rest = writePack(nonHeads, allHeads, tagTargets, excluded);
+ PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
+ tagTargets, excluded);
if (rest != null)
ret.add(rest);
}
if (!txnHeads.isEmpty()) {
- PackFile txn = writePack(txnHeads, PackWriter.NONE, null, excluded);
+ PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
+ null, excluded);
if (txn != null)
ret.add(txn);
}
@@ -961,8 +976,9 @@
}
private PackFile writePack(@NonNull Set<? extends ObjectId> want,
- @NonNull Set<? extends ObjectId> have, Set<ObjectId> tagTargets,
- List<ObjectIdSet> excludeObjects) throws IOException {
+ @NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
+ Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
+ throws IOException {
checkCancelled();
File tmpPack = null;
Map<PackExt, File> tmpExts = new TreeMap<>(
@@ -988,12 +1004,13 @@
// prepare the PackWriter
pw.setDeltaBaseAsOffset(true);
pw.setReuseDeltaCommits(false);
- if (tagTargets != null)
+ if (tagTargets != null) {
pw.setTagTargets(tagTargets);
+ }
if (excludeObjects != null)
for (ObjectIdSet idx : excludeObjects)
pw.excludeObjects(idx);
- pw.preparePack(pm, want, have);
+ pw.preparePack(pm, want, have, PackWriter.NONE, tags);
if (pw.getObjectCount() == 0)
return null;
checkCancelled();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java
index 8810a9f..7271560 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java
@@ -233,7 +233,9 @@
private List<CachedPack> cachedPacks = new ArrayList<>(2);
- private Set<ObjectId> tagTargets = Collections.emptySet();
+ private Set<ObjectId> tagTargets = NONE;
+
+ private Set<? extends ObjectId> excludeFromBitmapSelection = NONE;
private ObjectIdSet[] excludeInPacks;
@@ -712,8 +714,7 @@
public void preparePack(ProgressMonitor countingMonitor,
@NonNull Set<? extends ObjectId> want,
@NonNull Set<? extends ObjectId> have) throws IOException {
- preparePack(countingMonitor,
- want, have, Collections.<ObjectId> emptySet());
+ preparePack(countingMonitor, want, have, NONE, NONE);
}
/**
@@ -721,9 +722,9 @@
* <p>
* Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
* specifying commits that should not be walked past ("shallow" commits).
- * The caller is responsible for filtering out commits that should not
- * be shallow any more ("unshallow" commits as in {@link #setShallowPack})
- * from the shallow set.
+ * The caller is responsible for filtering out commits that should not be
+ * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+ * the shallow set.
*
* @param countingMonitor
* progress during object enumeration.
@@ -731,27 +732,67 @@
* objects of interest, ancestors of which will be included in
* the pack. Must not be {@code null}.
* @param have
- * objects whose ancestors (up to and including
- * {@code shallow} commits) do not need to be included in the
- * pack because they are already available from elsewhere.
- * Must not be {@code null}.
+ * objects whose ancestors (up to and including {@code shallow}
+ * commits) do not need to be included in the pack because they
+ * are already available from elsewhere. Must not be
+ * {@code null}.
* @param shallow
* commits indicating the boundary of the history marked with
- * {@code have}. Shallow commits have parents but those
- * parents are considered not to be already available.
- * Parents of {@code shallow} commits and earlier generations
- * will be included in the pack if requested by {@code want}.
- * Must not be {@code null}.
+ * {@code have}. Shallow commits have parents but those parents
+ * are considered not to be already available. Parents of
+ * {@code shallow} commits and earlier generations will be
+ * included in the pack if requested by {@code want}. Must not be
+ * {@code null}.
* @throws IOException
- * an I/O problem occured while reading objects.
+ * an I/O problem occurred while reading objects.
*/
public void preparePack(ProgressMonitor countingMonitor,
@NonNull Set<? extends ObjectId> want,
@NonNull Set<? extends ObjectId> have,
@NonNull Set<? extends ObjectId> shallow) throws IOException {
+ preparePack(countingMonitor, want, have, shallow, NONE);
+ }
+
+ /**
+ * Prepare the list of objects to be written to the pack stream.
+ * <p>
+ * Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
+ * specifying commits that should not be walked past ("shallow" commits).
+ * The caller is responsible for filtering out commits that should not be
+ * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+ * the shallow set.
+ *
+ * @param countingMonitor
+ * progress during object enumeration.
+ * @param want
+ * objects of interest, ancestors of which will be included in
+ * the pack. Must not be {@code null}.
+ * @param have
+ * objects whose ancestors (up to and including {@code shallow}
+ * commits) do not need to be included in the pack because they
+ * are already available from elsewhere. Must not be
+ * {@code null}.
+ * @param shallow
+ * commits indicating the boundary of the history marked with
+ * {@code have}. Shallow commits have parents but those parents
+ * are considered not to be already available. Parents of
+ * {@code shallow} commits and earlier generations will be
+ * included in the pack if requested by {@code want}. Must not be
+ * {@code null}.
+ * @param noBitmaps
+ * collection of objects to be excluded from bitmap commit
+ * selection.
+ * @throws IOException
+ * an I/O problem occurred while reading objects.
+ */
+ public void preparePack(ProgressMonitor countingMonitor,
+ @NonNull Set<? extends ObjectId> want,
+ @NonNull Set<? extends ObjectId> have,
+ @NonNull Set<? extends ObjectId> shallow,
+ @NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
try (ObjectWalk ow = getObjectWalk()) {
ow.assumeShallow(shallow);
- preparePack(countingMonitor, ow, want, have);
+ preparePack(countingMonitor, ow, want, have, noBitmaps);
}
}
@@ -784,13 +825,17 @@
* points of graph traversal). Pass {@link #NONE} if all objects
* reachable from {@code want} are desired, such as when serving
* a clone.
+ * @param noBitmaps
+ * collection of objects to be excluded from bitmap commit
+ * selection.
* @throws IOException
* when some I/O problem occur during reading objects.
*/
public void preparePack(ProgressMonitor countingMonitor,
@NonNull ObjectWalk walk,
@NonNull Set<? extends ObjectId> interestingObjects,
- @NonNull Set<? extends ObjectId> uninterestingObjects)
+ @NonNull Set<? extends ObjectId> uninterestingObjects,
+ @NonNull Set<? extends ObjectId> noBitmaps)
throws IOException {
if (countingMonitor == null)
countingMonitor = NullProgressMonitor.INSTANCE;
@@ -798,7 +843,7 @@
throw new IllegalArgumentException(
JGitText.get().shallowPacksRequireDepthWalk);
findObjectsToPack(countingMonitor, walk, interestingObjects,
- uninterestingObjects);
+ uninterestingObjects, noBitmaps);
}
/**
@@ -965,8 +1010,9 @@
/**
* Write the prepared pack to the supplied stream.
* <p>
- * Called after {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set)}
- * or {@link #preparePack(ProgressMonitor, Set, Set)}.
+ * Called after
+ * {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set, Set)} or
+ * {@link #preparePack(ProgressMonitor, Set, Set)}.
* <p>
* Performs delta search if enabled and writes the pack stream.
* <p>
@@ -1652,12 +1698,14 @@
private void findObjectsToPack(@NonNull ProgressMonitor countingMonitor,
@NonNull ObjectWalk walker, @NonNull Set<? extends ObjectId> want,
- @NonNull Set<? extends ObjectId> have) throws IOException {
+ @NonNull Set<? extends ObjectId> have,
+ @NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
final long countingStart = System.currentTimeMillis();
beginPhase(PackingPhase.COUNTING, countingMonitor, ProgressMonitor.UNKNOWN);
stats.interestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(want));
stats.uninterestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(have));
+ excludeFromBitmapSelection = noBitmaps;
canBuildBitmaps = config.isBuildBitmaps()
&& !shallowPack
@@ -2070,8 +2118,8 @@
PackWriterBitmapPreparer bitmapPreparer = new PackWriterBitmapPreparer(
reader, writeBitmaps, pm, stats.interestingObjects, config);
- Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits =
- bitmapPreparer.selectCommits(numCommits);
+ Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits = bitmapPreparer
+ .selectCommits(numCommits, excludeFromBitmapSelection);
beginPhase(PackingPhase.BUILDING_BITMAPS, pm, selectedCommits.size());
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java
index 07a03b4..8bedddb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java
@@ -141,6 +141,8 @@
*
* @param expectedCommitCount
* count of commits in the pack
+ * @param excludeFromBitmapSelection
+ * commits that should be excluded from bitmap selection
* @return commit objects for which bitmap indices should be built
* @throws IncorrectObjectTypeException
* if any of the processed objects is not a commit
@@ -149,7 +151,8 @@
* @throws MissingObjectException
* if an expected object is missing
*/
- Collection<BitmapCommit> selectCommits(int expectedCommitCount)
+ Collection<BitmapCommit> selectCommits(int expectedCommitCount,
+ Set<? extends ObjectId> excludeFromBitmapSelection)
throws IncorrectObjectTypeException, IOException,
MissingObjectException {
/*
@@ -164,7 +167,7 @@
RevWalk rw = new RevWalk(reader);
rw.setRetainBody(false);
CommitSelectionHelper selectionHelper = setupTipCommitBitmaps(rw,
- expectedCommitCount);
+ expectedCommitCount, excludeFromBitmapSelection);
pm.endTask();
int totCommits = selectionHelper.getCommitCount();
@@ -363,6 +366,8 @@
* @param expectedCommitCount
* expected count of commits. The actual count may be less due to
* unreachable garbage.
+ * @param excludeFromBitmapSelection
+ * commits that should be excluded from bitmap selection
* @return a {@link CommitSelectionHelper} containing bitmaps for the tip
* commits
* @throws IncorrectObjectTypeException
@@ -373,8 +378,10 @@
* if an expected object is missing
*/
private CommitSelectionHelper setupTipCommitBitmaps(RevWalk rw,
- int expectedCommitCount) throws IncorrectObjectTypeException,
- IOException, MissingObjectException {
+ int expectedCommitCount,
+ Set<? extends ObjectId> excludeFromBitmapSelection)
+ throws IncorrectObjectTypeException, IOException,
+ MissingObjectException {
BitmapBuilder reuse = commitBitmapIndex.newBitmapBuilder();
List<BitmapCommit> reuseCommits = new ArrayList<>();
for (PackBitmapIndexRemapper.Entry entry : bitmapRemapper) {
@@ -403,7 +410,8 @@
Set<RevCommit> peeledWant = new HashSet<>(want.size());
for (AnyObjectId objectId : want) {
RevObject ro = rw.peel(rw.parseAny(objectId));
- if (!(ro instanceof RevCommit) || reuse.contains(ro)) {
+ if (!(ro instanceof RevCommit) || reuse.contains(ro)
+ || excludeFromBitmapSelection.contains(ro)) {
continue;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
index ddb2fbf..17af0b9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
@@ -1523,7 +1523,7 @@
walk.reset();
ObjectWalk ow = rw.toObjectWalkWithSameObjects();
- pw.preparePack(pm, ow, wantAll, commonBase);
+ pw.preparePack(pm, ow, wantAll, commonBase, PackWriter.NONE);
rw = ow;
}