Mercurial > jhg
comparison src/org/tmatesoft/hg/repo/HgBranches.java @ 659:a5cf64f2e7e4 smartgit-4.6
Fix poor performance when reading/collecting branch information; respect the new cache location used by recent Mercurial versions; use a different algorithm to build the branch cache.
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Fri, 05 Jul 2013 20:42:45 +0200 |
| parents | 6526d8adbc0f |
| children | af5223b86dd3 |
Comparison legend: *equal* · *deleted* · *inserted* · *replaced*
| 641:2f33f102a8fa | 659:a5cf64f2e7e4 |
|---|---|
| 25 import java.io.FileReader; | 25 import java.io.FileReader; |
| 26 import java.io.FileWriter; | 26 import java.io.FileWriter; |
| 27 import java.io.IOException; | 27 import java.io.IOException; |
| 28 import java.util.ArrayList; | 28 import java.util.ArrayList; |
| 29 import java.util.Arrays; | 29 import java.util.Arrays; |
| 30 import java.util.Collections; | |
| 31 import java.util.HashMap; | 30 import java.util.HashMap; |
| 31 import java.util.Iterator; | |
| 32 import java.util.LinkedHashMap; | 32 import java.util.LinkedHashMap; |
| 33 import java.util.LinkedHashSet; | 33 import java.util.LinkedHashSet; |
| 34 import java.util.LinkedList; | 34 import java.util.LinkedList; |
| 35 import java.util.List; | 35 import java.util.List; |
| 36 import java.util.Map; | 36 import java.util.Map; |
| 44 import org.tmatesoft.hg.internal.Internals; | 44 import org.tmatesoft.hg.internal.Internals; |
| 45 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; | 45 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; |
| 46 import org.tmatesoft.hg.util.ProgressSupport; | 46 import org.tmatesoft.hg.util.ProgressSupport; |
| 47 | 47 |
| 48 /** | 48 /** |
| 49 * | 49 * Access information about branches in the repository |
| 50 * | |
| 50 * @author Artem Tikhomirov | 51 * @author Artem Tikhomirov |
| 51 * @author TMate Software Ltd. | 52 * @author TMate Software Ltd. |
| 52 */ | 53 */ |
| 53 public class HgBranches { | 54 public class HgBranches { |
| 54 | 55 |
| 123 } | 124 } |
| 124 | 125 |
| 125 void collect(final ProgressSupport ps) throws HgRuntimeException { | 126 void collect(final ProgressSupport ps) throws HgRuntimeException { |
| 126 branches.clear(); | 127 branches.clear(); |
| 127 final HgRepository repo = internalRepo.getRepo(); | 128 final HgRepository repo = internalRepo.getRepo(); |
| 128 ps.start(1 + repo.getChangelog().getRevisionCount() * 2); | 129 final HgChangelog clog = repo.getChangelog(); |
| 130 ps.start(1 + clog.getRevisionCount() * 2); | |
| 129 // | 131 // |
| 130 int lastCached = readCache(); | 132 int lastCached = readCache(); |
| 131 isCacheActual = lastCached == repo.getChangelog().getLastRevision(); | 133 isCacheActual = lastCached == clog.getLastRevision(); |
| 132 if (!isCacheActual) { | 134 if (!isCacheActual) { |
| 133 final HgParentChildMap<HgChangelog> pw = new HgParentChildMap<HgChangelog>(repo.getChangelog()); | 135 // XXX need a way to share HgParentChildMap<HgChangelog> |
| 136 final HgParentChildMap<HgChangelog> pw = new HgParentChildMap<HgChangelog>(clog); | |
| 134 pw.init(); | 137 pw.init(); |
| 135 ps.worked(repo.getChangelog().getRevisionCount()); | 138 ps.worked(clog.getRevisionCount()); |
| 139 // | |
| 136 // first revision branch found at | 140 // first revision branch found at |
| 137 final HashMap<String, Nodeid> branchStart = new HashMap<String, Nodeid>(); | 141 final HashMap<String, Nodeid> branchStart = new HashMap<String, Nodeid>(); |
| 138 // last revision seen for the branch | |
| 139 final HashMap<String, Nodeid> branchLastSeen = new HashMap<String, Nodeid>(); | |
| 140 // revisions from the branch that have no children at all | 142 // revisions from the branch that have no children at all |
| 141 final HashMap<String, List<Nodeid>> branchHeads = new HashMap<String, List<Nodeid>>(); | 143 final HashMap<String, List<Nodeid>> branchHeads = new HashMap<String, List<Nodeid>>(); |
| 142 // revisions that are immediate children of a node from a given branch | |
| 143 // after iteration, there are some revisions left in this map (children of a branch last revision | |
| 144 // that doesn't belong to the branch. No use of this now, perhaps can deduce isInactive (e.g.those | |
| 145 // branches that have non-empty candidates are inactive if all their heads are roots for those left) | |
| 146 final HashMap<String, List<Nodeid>> branchHeadCandidates = new HashMap<String, List<Nodeid>>(); | |
| 147 HgChangelog.Inspector insp = new HgChangelog.Inspector() { | 144 HgChangelog.Inspector insp = new HgChangelog.Inspector() { |
| 145 | |
| 146 private final ArrayList<Nodeid> parents = new ArrayList<Nodeid>(3); | |
| 148 | 147 |
| 149 public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { | 148 public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { |
| 150 String branchName = cset.branch(); | 149 String branchName = cset.branch(); |
| 150 List<Nodeid> _branchHeads; | |
| 151 // there are chances (with --force key) branch can get more than one start | |
| 152 // revision. Neither BranchInfo nor this code support this scenario at the moment. | |
| 151 if (!branchStart.containsKey(branchName)) { | 153 if (!branchStart.containsKey(branchName)) { |
| 152 branchStart.put(branchName, nodeid); | 154 branchStart.put(branchName, nodeid); |
| 153 branchHeads.put(branchName, new LinkedList<Nodeid>()); | 155 branchHeads.put(branchName, _branchHeads = new LinkedList<Nodeid>()); |
| 154 branchHeadCandidates.put(branchName, new LinkedList<Nodeid>()); | |
| 155 } else { | 156 } else { |
| 156 final List<Nodeid> headCandidates = branchHeadCandidates.get(branchName); | 157 _branchHeads = branchHeads.get(branchName); |
| 157 if (headCandidates.remove(nodeid)) { | 158 if (_branchHeads == null) { |
| 158 // likely we don't need to keep parent anymore, as we found at least 1 child thereof to be at the same branch | 159 branchHeads.put(branchName, _branchHeads = new LinkedList<Nodeid>()); |
| 159 // however, it's possible the child we found is a result of an earlier fork, and revision in the | 160 } |
| 160 // branchLastSeen is 'parallel' head, which needs to be kept | 161 } |
| 161 Nodeid lastSeenInBranch = branchLastSeen.get(branchName); | 162 // so far present node is the best candidate for head |
| 162 // check if current revision is on descendant line. Seems direct parents check is enough | 163 _branchHeads.add(nodeid); |
| 163 if (pw.safeFirstParent(nodeid).equals(lastSeenInBranch) || pw.safeSecondParent(nodeid).equals(lastSeenInBranch)) { | 164 parents.clear(); |
| 164 branchLastSeen.remove(branchName); | 165 // parents of this node, however, cease to be heads (if they are from this branch) |
| 166 pw.appendParentsOf(nodeid, parents); | |
| 167 _branchHeads.removeAll(parents); | |
| 168 ps.worked(1); | |
| 169 } | |
| 170 }; | |
| 171 // XXX alternatively may iterate with pw.all().subList(lastCached) | |
| 172 // but need an effective way to find out branch of particular changeset | |
| 173 clog.range(lastCached == -1 ? 0 : lastCached+1, HgRepository.TIP, insp); | |
| 174 // | |
| 175 // build BranchInfo, based on found and cached | |
| 176 for (String bn : branchStart.keySet()) { | |
| 177 BranchInfo bi = branches.get(bn); | |
| 178 if (bi != null) { | |
| 179 // combine heads found so far with those cached | |
| 180 LinkedHashSet<Nodeid> oldHeads = new LinkedHashSet<Nodeid>(bi.getHeads()); | |
| 181 // expect size of both oldHeads and newHeads sets to be small, and for x for hence acceptable. | |
| 182 for (Nodeid newHead : branchHeads.get(bn)) { | |
| 183 for (Iterator<Nodeid> it = oldHeads.iterator(); it.hasNext();) { | |
| 184 if (pw.isChild(it.next(), newHead)) { | |
| 185 it.remove(); | |
| 165 } | 186 } |
| 166 } | 187 } |
| 167 } | 188 } |
| 168 List<Nodeid> immediateChildren = pw.directChildren(nodeid); | 189 oldHeads.addAll(branchHeads.get(bn)); |
| 169 if (immediateChildren.size() > 0) { | 190 assert oldHeads.size() > 0; |
| 170 // 1) children may be in another branch | 191 bi = new BranchInfo(bn, bi.getStart(), oldHeads.toArray(new Nodeid[oldHeads.size()])); |
| 171 // and unless we later came across another element from this branch, | |
| 172 // we need to record all these as potential heads | |
| 173 // | |
| 174 // 2) head1 with children in different branch, and head2 in this branch without children | |
| 175 branchLastSeen.put(branchName, nodeid); | |
| 176 branchHeadCandidates.get(branchName).addAll(immediateChildren); | |
| 177 } else { | |
| 178 // no more children known for this node, it's (one of the) head of the branch | |
| 179 branchHeads.get(branchName).add(nodeid); | |
| 180 } | |
| 181 ps.worked(1); | |
| 182 } | |
| 183 }; | |
| 184 repo.getChangelog().range(lastCached == -1 ? 0 : lastCached+1, HgRepository.TIP, insp); | |
| 185 // those last seen revisions from the branch that had no children from the same branch are heads. | |
| 186 for (String bn : branchLastSeen.keySet()) { | |
| 187 // these are inactive branches? - there were children, but not from the same branch? | |
| 188 branchHeads.get(bn).add(branchLastSeen.get(bn)); | |
| 189 } | |
| 190 for (String bn : branchStart.keySet()) { | |
| 191 BranchInfo bi = branches.get(bn); | |
| 192 if (bi != null) { | |
| 193 // although heads from cache shall not intersect with heads after lastCached, | |
| 194 // use of LHS doesn't hurt (and makes sense e.g. if cache is not completely correct in my tests) | |
| 195 LinkedHashSet<Nodeid> heads = new LinkedHashSet<Nodeid>(bi.getHeads()); | |
| 196 for (Nodeid oldHead : bi.getHeads()) { | |
| 197 // XXX perhaps, need pw.canReach(Nodeid from, Collection<Nodeid> to) | |
| 198 List<Nodeid> newChildren = pw.childrenOf(Collections.singletonList(oldHead)); | |
| 199 if (!newChildren.isEmpty()) { | |
| 200 // likely not a head any longer, | |
| 201 // check if any new head can be reached from old one, and, if yes, | |
| 202 // do not consider that old head as head. | |
| 203 for (Nodeid newHead : branchHeads.get(bn)) { | |
| 204 if (newChildren.contains(newHead)) { | |
| 205 heads.remove(oldHead); | |
| 206 break; | |
| 207 } | |
| 208 } | |
| 209 } // else - oldHead still head for the branch | |
| 210 } | |
| 211 heads.addAll(branchHeads.get(bn)); | |
| 212 bi = new BranchInfo(bn, bi.getStart(), heads.toArray(new Nodeid[0])); | |
| 213 } else { | 192 } else { |
| 214 Nodeid[] heads = branchHeads.get(bn).toArray(new Nodeid[0]); | 193 Nodeid[] heads = branchHeads.get(bn).toArray(new Nodeid[0]); |
| 215 bi = new BranchInfo(bn, branchStart.get(bn), heads); | 194 bi = new BranchInfo(bn, branchStart.get(bn), heads); |
| 216 } | 195 } |
| 217 branches.put(bn, bi); | 196 branches.put(bn, bi); |
| 218 } | 197 } |
| 219 } | 198 } |
| 220 final HgChangelog clog = repo.getChangelog(); | |
| 221 final HgRevisionMap<HgChangelog> rmap = new HgRevisionMap<HgChangelog>(clog).init(); | 199 final HgRevisionMap<HgChangelog> rmap = new HgRevisionMap<HgChangelog>(clog).init(); |
| 222 for (BranchInfo bi : branches.values()) { | 200 for (BranchInfo bi : branches.values()) { |
| 223 bi.validate(clog, rmap); | 201 bi.validate(clog, rmap); |
| 224 } | 202 } |
| 225 repoChangeTracker.touch(); | 203 repoChangeTracker.touch(); |
| 273 bw.close(); | 251 bw.close(); |
| 274 } | 252 } |
| 275 | 253 |
| 276 private File getCacheFile() { | 254 private File getCacheFile() { |
| 277 // prior to 1.8 used to be .hg/branchheads.cache | 255 // prior to 1.8 used to be .hg/branchheads.cache |
| 256 // since 2.5 there is filter suffix | |
| 257 File f = internalRepo.getFileFromRepoDir("cache/branchheads-base"); | |
| 258 if (f.exists()) { | |
| 259 return f; | |
| 260 } | |
| 278 return internalRepo.getFileFromRepoDir("cache/branchheads"); | 261 return internalRepo.getFileFromRepoDir("cache/branchheads"); |
| 279 } | 262 } |
| 280 | 263 |
| 281 /*package-local*/ void reloadIfChanged(ProgressSupport ps) throws HgRuntimeException { | 264 /*package-local*/ void reloadIfChanged(ProgressSupport ps) throws HgRuntimeException { |
| 282 if (repoChangeTracker.isChanged()) { | 265 if (repoChangeTracker.isChanged()) { |
| 386 } | 369 } |
| 387 return closedHeads.contains(head); | 370 return closedHeads.contains(head); |
| 388 } | 371 } |
| 389 // public Nodeid getTip() { | 372 // public Nodeid getTip() { |
| 390 // } | 373 // } |
| 374 // XXX Not public as there are chances for few possible branch starts, and I need to decide how to handle that | |
| 391 /*public*/ Nodeid getStart() { | 375 /*public*/ Nodeid getStart() { |
| 392 // first node where branch appears | 376 // first node where branch appears |
| 393 return start; | 377 return start; |
| 394 } | 378 } |
| 395 } | 379 } |
