Mercurial > jhg
comparison src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java @ 425:48f993aa2f41
FIXMEs: exceptions, javadoc
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Wed, 28 Mar 2012 18:39:29 +0200 |
| parents | 9c9c442b5f2e |
| children | cd658b24a620 |
comparison legend: equal | deleted | inserted | replaced
| 424:6437d261048a | 425:48f993aa2f41 |
|---|---|
| 28 import java.util.Collections; | 28 import java.util.Collections; |
| 29 import java.util.NoSuchElementException; | 29 import java.util.NoSuchElementException; |
| 30 import java.util.Set; | 30 import java.util.Set; |
| 31 import java.util.TreeSet; | 31 import java.util.TreeSet; |
| 32 | 32 |
| 33 import org.tmatesoft.hg.core.HgException; | |
| 34 import org.tmatesoft.hg.core.Nodeid; | 33 import org.tmatesoft.hg.core.Nodeid; |
| 34 import org.tmatesoft.hg.core.SessionContext; | |
| 35 import org.tmatesoft.hg.internal.ByteArrayChannel; | 35 import org.tmatesoft.hg.internal.ByteArrayChannel; |
| 36 import org.tmatesoft.hg.internal.Experimental; | 36 import org.tmatesoft.hg.internal.Experimental; |
| 37 import org.tmatesoft.hg.internal.FilterByteChannel; | 37 import org.tmatesoft.hg.internal.FilterByteChannel; |
| 38 import org.tmatesoft.hg.internal.Internals; | 38 import org.tmatesoft.hg.internal.Internals; |
| 39 import org.tmatesoft.hg.internal.ManifestRevision; | 39 import org.tmatesoft.hg.internal.ManifestRevision; |
| 160 * @throws CancelledException if operation execution was cancelled | 160 * @throws CancelledException if operation execution was cancelled |
| 161 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em> | 161 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em> |
| 162 */ | 162 */ |
| 163 public void walk(int baseRevision, HgStatusInspector inspector) throws IOException, CancelledException, HgRuntimeException { | 163 public void walk(int baseRevision, HgStatusInspector inspector) throws IOException, CancelledException, HgRuntimeException { |
| 164 if (HgInternals.wrongRevisionIndex(baseRevision) || baseRevision == BAD_REVISION) { | 164 if (HgInternals.wrongRevisionIndex(baseRevision) || baseRevision == BAD_REVISION) { |
| 165 throw new IllegalArgumentException(String.valueOf(baseRevision)); | 165 throw new HgInvalidRevisionException(baseRevision); |
| 166 } | 166 } |
| 167 if (getDirstateImpl() == null) { | 167 if (getDirstateImpl() == null) { |
| 168 getDirstate(); | 168 getDirstate(); |
| 169 } | 169 } |
| 170 if (getDirstateParentManifest() == null) { | 170 if (getDirstateParentManifest() == null) { |
| 344 inspector.modified(df.getPath()); | 344 inspector.modified(df.getPath()); |
| 345 } else { | 345 } else { |
| 346 inspector.clean(df.getPath()); | 346 inspector.clean(df.getPath()); |
| 347 } | 347 } |
| 348 } | 348 } |
| 349 } catch (HgException ex) { | 349 } catch (HgRuntimeException ex) { |
| 350 repo.getContext().getLog().warn(getClass(), ex, null); | 350 repo.getContext().getLog().warn(getClass(), ex, null); |
| 351 inspector.invalid(fname, ex); | 351 inspector.invalid(fname, ex); |
| 352 } | 352 } |
| 353 } | 353 } |
| 354 } else if ((r = getDirstateImpl().checkAdded(fname)) != null) { | 354 } else if ((r = getDirstateImpl().checkAdded(fname)) != null) { |
| 379 Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision); | 379 Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision); |
| 380 if (origin != null) { | 380 if (origin != null) { |
| 381 inspector.copied(getPathPool().path(origin), fname); | 381 inspector.copied(getPathPool().path(origin), fname); |
| 382 return; | 382 return; |
| 383 } | 383 } |
| 384 } catch (HgException ex) { | 384 } catch (HgInvalidFileException ex) { |
| 385 // report failure and continue status collection | 385 // report failure and continue status collection |
| 386 inspector.invalid(fname, ex); | 386 inspector.invalid(fname, ex); |
| 387 } | 387 } |
| 388 } else if ((r = getDirstateImpl().checkAdded(fname)) != null) { | 388 } else if ((r = getDirstateImpl().checkAdded(fname)) != null) { |
| 389 if (r.copySource() != null && baseRevNames.contains(r.copySource())) { | 389 if (r.copySource() != null && baseRevNames.contains(r.copySource())) { |
| 433 if (areTheSame(f, fileNode, nid1)) { | 433 if (areTheSame(f, fileNode, nid1)) { |
| 434 inspector.clean(fname); | 434 inspector.clean(fname); |
| 435 } else { | 435 } else { |
| 436 inspector.modified(fname); | 436 inspector.modified(fname); |
| 437 } | 437 } |
| 438 } catch (HgException ex) { | 438 } catch (HgRuntimeException ex) { |
| 439 repo.getContext().getLog().warn(getClass(), ex, null); | 439 repo.getContext().getLog().warn(getClass(), ex, null); |
| 440 inspector.invalid(fname, ex); | 440 inspector.invalid(fname, ex); |
| 441 } | 441 } |
| 442 baseRevNames.remove(fname); // consumed, processed, handled. | 442 baseRevNames.remove(fname); // consumed, processed, handled. |
| 443 } else if (getDirstateImpl().checkRemoved(fname) != null) { | 443 } else if (getDirstateImpl().checkRemoved(fname) != null) { |
| 454 // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest | 454 // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest |
| 455 // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids). | 455 // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids). |
| 456 // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean' | 456 // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean' |
| 457 } | 457 } |
| 458 | 458 |
| 459 private boolean areTheSame(FileInfo f, HgDataFile dataFile, Nodeid revision) throws HgException, HgInvalidFileException { | 459 private boolean areTheSame(FileInfo f, HgDataFile dataFile, Nodeid revision) throws HgInvalidFileException { |
| 460 // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison | 460 // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison |
| 461 ByteArrayChannel bac = new ByteArrayChannel(); | 461 ByteArrayChannel bac = new ByteArrayChannel(); |
| 462 try { | 462 try { |
| 463 int fileRevisionIndex = dataFile.getRevisionIndex(revision); | 463 int fileRevisionIndex = dataFile.getRevisionIndex(revision); |
| 464 // need content with metadata striped off - although theoretically chances are metadata may be different, | 464 // need content with metadata striped off - although theoretically chances are metadata may be different, |
| 609 } | 609 } |
| 610 } | 610 } |
| 611 // final Path[] dirs = f.toArray(new Path[d.size()]); | 611 // final Path[] dirs = f.toArray(new Path[d.size()]); |
| 612 if (d.isEmpty()) { | 612 if (d.isEmpty()) { |
| 613 final Path[] files = f.toArray(new Path[f.size()]); | 613 final Path[] files = f.toArray(new Path[f.size()]); |
| 614 FileIterator fi = new FileListIterator(hgRepo.getWorkingDir(), files); | 614 FileIterator fi = new FileListIterator(hgRepo.getContext(), hgRepo.getWorkingDir(), files); |
| 615 return new HgWorkingCopyStatusCollector(hgRepo, fi); | 615 return new HgWorkingCopyStatusCollector(hgRepo, fi); |
| 616 } | 616 } |
| 617 // | 617 // |
| 618 | 618 |
| 619 //FileIterator fi = file.isDirectory() ? new DirFileIterator(hgRepo, file) : new FileListIterator(, file); | 619 //FileIterator fi = file.isDirectory() ? new DirFileIterator(hgRepo, file) : new FileListIterator(, file); |
| 646 private final File dir; | 646 private final File dir; |
| 647 private final Path[] paths; | 647 private final Path[] paths; |
| 648 private int index; | 648 private int index; |
| 649 private RegularFileInfo nextFile; | 649 private RegularFileInfo nextFile; |
| 650 private final boolean execCap, linkCap; | 650 private final boolean execCap, linkCap; |
| 651 | 651 private final SessionContext sessionContext; |
| 652 public FileListIterator(File startDir, Path... files) { | 652 |
| 653 public FileListIterator(SessionContext ctx, File startDir, Path... files) { | |
| 654 sessionContext = ctx; | |
| 653 dir = startDir; | 655 dir = startDir; |
| 654 paths = files; | 656 paths = files; |
| 655 reset(); | 657 reset(); |
| 656 execCap = Internals.checkSupportsExecutables(startDir); | 658 execCap = Internals.checkSupportsExecutables(startDir); |
| 657 linkCap = Internals.checkSupportsSymlinks(startDir); | 659 linkCap = Internals.checkSupportsSymlinks(startDir); |
| 658 } | 660 } |
| 659 | 661 |
| 660 public void reset() { | 662 public void reset() { |
| 661 index = -1; | 663 index = -1; |
| 662 nextFile = new RegularFileInfo(execCap, linkCap); | 664 nextFile = new RegularFileInfo(sessionContext, execCap, linkCap); |
| 663 } | 665 } |
| 664 | 666 |
| 665 public boolean hasNext() { | 667 public boolean hasNext() { |
| 666 return paths.length > 0 && index < paths.length-1; | 668 return paths.length > 0 && index < paths.length-1; |
| 667 } | 669 } |
