Mercurial > jhg
comparison src/org/tmatesoft/hg/internal/diff/BlameHelper.java @ 703:7839ff0bfd78
Refactor: move diff/blame related code to a separate package
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Wed, 14 Aug 2013 14:51:51 +0200 |
| parents | src/org/tmatesoft/hg/internal/BlameHelper.java@58a6900f845d |
| children | 497e697636fc |
comparison
equal
deleted
inserted
replaced
| 702:992fa84e7885 | 703:7839ff0bfd78 |
|---|---|
| 1 /* | |
| 2 * Copyright (c) 2013 TMate Software Ltd | |
| 3 * | |
| 4 * This program is free software; you can redistribute it and/or modify | |
| 5 * it under the terms of the GNU General Public License as published by | |
| 6 * the Free Software Foundation; version 2 of the License. | |
| 7 * | |
| 8 * This program is distributed in the hope that it will be useful, | |
| 9 * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
| 10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
| 11 * GNU General Public License for more details. | |
| 12 * | |
| 13 * For information on how to redistribute this software under | |
| 14 * the terms of a license other than GNU General Public License | |
| 15 * contact TMate Software at support@hg4j.com | |
| 16 */ | |
| 17 package org.tmatesoft.hg.internal.diff; | |
| 18 | |
| 19 import static org.tmatesoft.hg.core.HgIterateDirection.OldToNew; | |
| 20 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION; | |
| 21 | |
| 22 import java.util.ArrayList; | |
| 23 import java.util.Arrays; | |
| 24 import java.util.Iterator; | |
| 25 import java.util.LinkedList; | |
| 26 import java.util.List; | |
| 27 import java.util.ListIterator; | |
| 28 | |
| 29 import org.tmatesoft.hg.core.HgCallbackTargetException; | |
| 30 import org.tmatesoft.hg.core.Nodeid; | |
| 31 import org.tmatesoft.hg.internal.ByteArrayChannel; | |
| 32 import org.tmatesoft.hg.internal.FileHistory; | |
| 33 import org.tmatesoft.hg.internal.FileRevisionHistoryChunk; | |
| 34 import org.tmatesoft.hg.internal.IntSliceSeq; | |
| 35 import org.tmatesoft.hg.internal.IntTuple; | |
| 36 import org.tmatesoft.hg.internal.IntVector; | |
| 37 import org.tmatesoft.hg.internal.diff.DiffHelper.LineSequence; | |
| 38 import org.tmatesoft.hg.internal.diff.DiffHelper.LineSequence.ByteChain; | |
| 39 import org.tmatesoft.hg.internal.diff.DiffRangeMap.RangePair; | |
| 40 import org.tmatesoft.hg.core.HgBlameInspector; | |
| 41 import org.tmatesoft.hg.core.HgBlameInspector.*; | |
| 42 import org.tmatesoft.hg.repo.HgChangelog; | |
| 43 import org.tmatesoft.hg.repo.HgDataFile; | |
| 44 import org.tmatesoft.hg.repo.HgInvalidStateException; | |
| 45 import org.tmatesoft.hg.repo.HgParentChildMap; | |
| 46 import org.tmatesoft.hg.repo.HgRepository; | |
| 47 import org.tmatesoft.hg.repo.HgRevisionMap; | |
| 48 import org.tmatesoft.hg.repo.HgRuntimeException; | |
| 49 import org.tmatesoft.hg.util.Adaptable; | |
| 50 import org.tmatesoft.hg.util.CancelledException; | |
| 51 import org.tmatesoft.hg.util.Pair; | |
| 52 | |
| 53 /** | |
| 54 * Blame implementation | |
| 55 * @see HgBlameInspector | |
| 56 * @author Artem Tikhomirov | |
| 57 * @author TMate Software Ltd. | |
| 58 */ | |
| 59 public class BlameHelper { | |
| 60 | |
private final HgBlameInspector insp; // client callback all blame blocks are reported to; set once in ctor
private FileLinesCache linesCache; // initialized in prepare(), used by subsequent diff()/annotateChange() calls
private HgParentChildMap<HgChangelog> clogMap; // lazily built in getBaseRevisionLines() to find merge base
| 64 | |
/**
 * @param inspector client that receives the blame blocks; shall not be <code>null</code>
 */
public BlameHelper(HgBlameInspector inspector) {
	insp = inspector;
}
| 68 | |
| 69 /** | |
| 70 * Build history of the file for the specified range (follow renames if necessary). This history | |
| 71 * is used to access various file revision data during subsequent {@link #diff(int, int, int, int)} and | |
| 72 * {@link #annotateChange(int, int, int[], int[])} calls. Callers can use returned history for own approaches | |
| 73 * to iteration over file history. | |
| 74 | |
| 75 * <p>NOTE, clogRevIndexEnd has to list name of the supplied file in the corresponding manifest, | |
| 76 * as it's not possible to trace rename history otherwise. | |
| 77 */ | |
| 78 public FileHistory prepare(HgDataFile df, int clogRevIndexStart, int clogRevIndexEnd) throws HgRuntimeException { | |
| 79 assert clogRevIndexStart <= clogRevIndexEnd; | |
| 80 FileHistory fileHistory = new FileHistory(df, clogRevIndexStart, clogRevIndexEnd); | |
| 81 fileHistory.build(); | |
| 82 int cacheHint = 5; // cache comes useful when we follow merge branches and don't want to | |
| 83 // parse base revision twice. There's no easy way to determine max(distance(all(base,merge))), | |
| 84 // hence the heuristics to use the longest history chunk: | |
| 85 for (FileRevisionHistoryChunk c : fileHistory.iterate(OldToNew)) { | |
| 86 // iteration order is not important here | |
| 87 if (c.revisionCount() > cacheHint) { | |
| 88 cacheHint = c.revisionCount(); | |
| 89 } | |
| 90 } | |
| 91 linesCache = new FileLinesCache(cacheHint); | |
| 92 for (FileRevisionHistoryChunk fhc : fileHistory.iterate(OldToNew)) { | |
| 93 // iteration order is not important here | |
| 94 linesCache.useFileUpTo(fhc.getFile(), fhc.getEndChangeset()); | |
| 95 } | |
| 96 return fileHistory; | |
| 97 } | |
| 98 | |
| 99 // NO_REVISION is not allowed as any argument | |
| 100 public void diff(int fileRevIndex1, int clogRevIndex1, int fileRevIndex2, int clogRevIndex2) throws HgCallbackTargetException, HgRuntimeException { | |
| 101 HgDataFile targetFile = linesCache.getFile(clogRevIndex2); | |
| 102 LineSequence c1 = linesCache.lines(clogRevIndex1, fileRevIndex1); | |
| 103 LineSequence c2 = linesCache.lines(clogRevIndex2, fileRevIndex2); | |
| 104 DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>(); | |
| 105 pg.init(c1, c2); | |
| 106 BlameBlockInspector bbi = new BlameBlockInspector(targetFile, fileRevIndex2, insp, clogRevIndex1, clogRevIndex2); | |
| 107 pg.findMatchingBlocks(bbi); | |
| 108 bbi.checkErrors(); | |
| 109 } | |
| 110 | |
/**
 * Annotate a single file revision against its parent(s), reporting added/changed/deleted/same
 * blocks to the inspector. Three cases: merge (two parents) — diff against first parent with a
 * {@link MergeResolutionStrategy} attributing ranges to the second; no parents — everything
 * reported as added; single parent — plain diff against that parent.
 *
 * @param fileRevIndex file revision index being annotated
 * @param csetRevIndex changelog revision index the file revision belongs to
 * @param fileParentRevs file revision indexes of the two parents, NO_REVISION when absent
 * @param fileParentClogRevs changelog revision indexes of the two parents, NO_REVISION when absent
 */
public void annotateChange(int fileRevIndex, int csetRevIndex, int[] fileParentRevs, int[] fileParentClogRevs) throws HgCallbackTargetException, HgRuntimeException {
	HgDataFile targetFile = linesCache.getFile(csetRevIndex);
	final LineSequence fileRevLines = linesCache.lines(csetRevIndex, fileRevIndex);
	if (fileParentClogRevs[0] != NO_REVISION && fileParentClogRevs[1] != NO_REVISION) {
		// merge revision: diff goes against the first parent, merge strategy decides
		// which ranges originate from the second parent
		int p1ClogIndex = fileParentClogRevs[0];
		int p2ClogIndex = fileParentClogRevs[1];
		LineSequence p1Lines = linesCache.lines(p1ClogIndex, fileParentRevs[0]);
		LineSequence p2Lines = linesCache.lines(p2ClogIndex, fileParentRevs[1]);
		MergeResolutionStrategy mergeResolver = createMergeStrategy(fileRevLines, p1Lines, p2Lines, csetRevIndex, fileParentClogRevs);
		//
		DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>();
		pg.init(p1Lines, fileRevLines);
		BlameBlockInspector bbi = new BlameBlockInspector(targetFile, fileRevIndex, insp, p1ClogIndex, csetRevIndex);
		bbi.setMergeParent2(mergeResolver, p2ClogIndex);
		pg.findMatchingBlocks(bbi);
		bbi.checkErrors();
	} else if (fileParentClogRevs[0] == fileParentClogRevs[1]) {
		// may be equal iff both are unset
		assert fileParentClogRevs[0] == NO_REVISION;
		// everything added: drive the inspector manually with a single whole-file match
		// against an empty origin sequence
		BlameBlockInspector bbi = new BlameBlockInspector(targetFile, fileRevIndex, insp, NO_REVISION, csetRevIndex);
		bbi.begin(LineSequence.newlines(new byte[0]), fileRevLines);
		bbi.match(0, fileRevLines.chunkCount()-1, 0);
		bbi.end();
		bbi.checkErrors();
	} else {
		// exactly one parent: regular diff against it
		int soleParentIndex = fileParentClogRevs[0] == NO_REVISION ? 1 : 0;
		assert fileParentClogRevs[soleParentIndex] != NO_REVISION;
		LineSequence parentLines = linesCache.lines(fileParentClogRevs[soleParentIndex], fileParentRevs[soleParentIndex]);

		DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>();
		pg.init(parentLines, fileRevLines);
		BlameBlockInspector bbi = new BlameBlockInspector(targetFile, fileRevIndex, insp, fileParentClogRevs[soleParentIndex], csetRevIndex);
		pg.findMatchingBlocks(bbi);
		bbi.checkErrors();
	}
}
| 148 | |
// Selects between MergeStrategy2 (true) and the legacy MergeStrategy1 (false) in
// createMergeStrategy(). Wrapped in Boolean.TRUE.booleanValue() — presumably so the
// compiler doesn't treat the unused branch as dead code; TODO confirm before simplifying.
private static final boolean useNewStrategy = Boolean.TRUE.booleanValue();
| 150 | |
/**
 * Build the strategy that attributes ranges of a merged revision to its second parent.
 * With useNewStrategy: collects all alternative match sequences between p2 and the merged
 * revision, plus diffs of both parents against their common ancestor (merge base), and
 * feeds them to MergeStrategy2. Otherwise falls back to MergeStrategy1, which relies on
 * the single best p2-to-merged match only.
 */
private MergeResolutionStrategy createMergeStrategy(LineSequence fileRevLines, LineSequence p1Lines, LineSequence p2Lines, int csetRevIndex, int[] fileParentClogRevs) {
	DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>();
	if (useNewStrategy) {
		// collect every alternative way p2's lines may map onto the merged revision
		final ArrayList<RangePairSeq> allMatches = new ArrayList<RangePairSeq>();
		pg.init(p2Lines, fileRevLines);
		pg.findAllMatchAlternatives(new DiffHelper.MatchInspector<LineSequence>() {
			private RangePairSeq matches;

			public void begin(LineSequence s1, LineSequence s2) {
				matches = new RangePairSeq();
			}

			public void match(int startSeq1, int startSeq2, int matchLength) {
				matches.add(startSeq1, startSeq2, matchLength);
			}

			public void end() {
				// keep only non-empty alternatives
				if (matches.size() > 0) {
					allMatches.add(matches);
				}
			}

		});
		//
		// diff both parents against the merge base to help disambiguate range origin
		LineSequence baseLines = getBaseRevisionLines(csetRevIndex, fileParentClogRevs);
		pg.init(p1Lines, baseLines);
		DiffRangeMap p1ToBase = new DiffRangeMap().fill(pg);
		pg.init(baseLines, p2Lines);
		DiffRangeMap baseToP2 = new DiffRangeMap().fill(pg);
		return new MergeStrategy2(allMatches, p1ToBase, baseToP2);
	} else {
		pg.init(p2Lines, fileRevLines);
		EqualBlocksCollector p2MergeCommon = new EqualBlocksCollector();
		pg.findMatchingBlocks(p2MergeCommon);
		return new MergeStrategy1(p2MergeCommon.matches);
	}
}
| 188 | |
/**
 * Lines of the file as of the common ancestor (merge base) of the two parent changesets.
 * Returns an empty sequence when the file is not present in the ancestor's manifest.
 *
 * @param clogRevIndex changelog revision index of the merge changeset (used to resolve the file)
 * @param fileParentClogRevs changelog revision indexes of the two parents, both must be valid
 */
private LineSequence getBaseRevisionLines(int clogRevIndex, int[] fileParentClogRevs) {
	assert fileParentClogRevs[0] >= 0;
	assert fileParentClogRevs[1] >= 0;
	HgDataFile targetFile = linesCache.getFile(clogRevIndex);
	final HgRepository repo = targetFile.getRepo();
	if (clogMap == null) {
		// lazy init: building the parent-child map is expensive, do it at most once per helper
		// FIXME replace HgParentChildMap with revlog.indexWalk(AncestorIterator))
		clogMap = new HgParentChildMap<HgChangelog>(repo.getChangelog());
		clogMap.init();
	}
	final HgRevisionMap<HgChangelog> m = clogMap.getRevisionMap();
	Nodeid ancestor = clogMap.ancestor(m.revision(fileParentClogRevs[0]), m.revision(fileParentClogRevs[1]));
	final int ancestorRevIndex = m.revisionIndex(ancestor);
	Nodeid fr = repo.getManifest().getFileRevision(ancestorRevIndex, targetFile.getPath());
	if (fr == null) {
		// file didn't exist in the base revision — treat base content as empty
		return LineSequence.newlines(new byte[0]);
	}
	return linesCache.lines(ancestorRevIndex, targetFile.getRevisionIndex(fr));
}
| 208 | |
| 209 private static class FileLinesCache { | |
| 210 private final LinkedList<Pair<Integer, LineSequence>> lruCache; | |
| 211 private final int limit; | |
| 212 private final LinkedList<Pair<Integer, HgDataFile>> files; // TODO in fact, need sparse array | |
| 213 | |
| 214 /** | |
| 215 * @param lruLimit how many parsed file revisions to keep | |
| 216 */ | |
| 217 public FileLinesCache(int lruLimit) { | |
| 218 limit = lruLimit; | |
| 219 lruCache = new LinkedList<Pair<Integer, LineSequence>>(); | |
| 220 files = new LinkedList<Pair<Integer,HgDataFile>>(); | |
| 221 } | |
| 222 | |
| 223 public void useFileUpTo(HgDataFile df, int clogRevIndex) { | |
| 224 Pair<Integer, HgDataFile> newEntry = new Pair<Integer, HgDataFile>(clogRevIndex, df); | |
| 225 for (ListIterator<Pair<Integer, HgDataFile>> it = files.listIterator(); it.hasNext();) { | |
| 226 Pair<Integer, HgDataFile> e = it.next(); | |
| 227 if (e.first() == clogRevIndex) { | |
| 228 assert e.second().getPath().equals(df.getPath()); | |
| 229 return; | |
| 230 } | |
| 231 if (e.first() > clogRevIndex) { | |
| 232 // insert new entry before current | |
| 233 it.previous(); | |
| 234 it.add(newEntry); | |
| 235 return; | |
| 236 } | |
| 237 } | |
| 238 files.add(newEntry); | |
| 239 } | |
| 240 | |
| 241 public HgDataFile getFile(int clogRevIndex) { | |
| 242 for (Pair<Integer, HgDataFile> e : files) { | |
| 243 if (e.first() >= clogRevIndex) { | |
| 244 return e.second(); | |
| 245 } | |
| 246 } | |
| 247 throw new HgInvalidStateException(String.format("Got %d file-changelog mappings, but no luck for revision %d.", files.size(), clogRevIndex)); | |
| 248 } | |
| 249 | |
| 250 public LineSequence lines(int clogRevIndex, int fileRevIndex) throws HgRuntimeException { | |
| 251 Pair<Integer, LineSequence> cached = checkCache(clogRevIndex); | |
| 252 if (cached != null) { | |
| 253 return cached.second(); | |
| 254 } | |
| 255 HgDataFile df = getFile(clogRevIndex); | |
| 256 try { | |
| 257 ByteArrayChannel c; | |
| 258 df.content(fileRevIndex, c = new ByteArrayChannel()); | |
| 259 LineSequence rv = LineSequence.newlines(c.toArray()); | |
| 260 lruCache.addFirst(new Pair<Integer, LineSequence>(clogRevIndex, rv)); | |
| 261 if (lruCache.size() > limit) { | |
| 262 lruCache.removeLast(); | |
| 263 } | |
| 264 return rv; | |
| 265 } catch (CancelledException ex) { | |
| 266 // TODO likely it was bad idea to throw cancelled exception from content() | |
| 267 // deprecate and provide alternative? | |
| 268 HgInvalidStateException ise = new HgInvalidStateException("ByteArrayChannel never throws CancelledException"); | |
| 269 ise.initCause(ex); | |
| 270 throw ise; | |
| 271 } | |
| 272 } | |
| 273 | |
| 274 private Pair<Integer,LineSequence> checkCache(int fileRevIndex) { | |
| 275 Pair<Integer, LineSequence> rv = null; | |
| 276 for (ListIterator<Pair<Integer, LineSequence>> it = lruCache.listIterator(); it.hasNext(); ) { | |
| 277 Pair<Integer, LineSequence> p = it.next(); | |
| 278 if (p.first() == fileRevIndex) { | |
| 279 rv = p; | |
| 280 it.remove(); | |
| 281 break; | |
| 282 } | |
| 283 } | |
| 284 if (rv != null) { | |
| 285 lruCache.addFirst(rv); | |
| 286 } | |
| 287 return rv; | |
| 288 } | |
| 289 } | |
| 290 | |
/**
 * Bridges low-level diff callbacks ({@link DiffHelper.DeltaInspector}) to client-facing
 * {@link HgBlameInspector} blocks. When the annotated revision is a merge
 * ({@link #setMergeParent2} was called), changed/added ranges are split between the two
 * parents with the help of a {@link MergeResolutionStrategy}.
 * Any {@link HgCallbackTargetException} from the client is captured into {@link #error}
 * and re-thrown from {@link #checkErrors()}; once captured, remaining callbacks are no-ops.
 */
private static class BlameBlockInspector extends DiffHelper.DeltaInspector<LineSequence> {
	private final HgBlameInspector insp; // client callback, never null
	private final int csetOrigin; // changeset the diff goes from (first parent for merges)
	private final int csetTarget; // changeset being annotated
	private MergeResolutionStrategy p2MergeCommon; // non-null only while processing a merge revision
	private int csetMergeParent; // second merge parent, meaningful iff p2MergeCommon != null
	private final AnnotateRev annotatedRevision;
	private HgCallbackTargetException error; // first exception raised by the client, if any

	public BlameBlockInspector(HgDataFile df, int fileRevIndex, HgBlameInspector inspector, int originCset, int targetCset) {
		assert inspector != null;
		insp = inspector;
		annotatedRevision = new AnnotateRev();
		annotatedRevision.set(df, fileRevIndex);
		csetOrigin = originCset;
		csetTarget = targetCset;
	}

	// Mark the annotated revision as a merge: ranges coming from the second parent
	// (parentCset2) will be attributed via p2MergeStrategy.
	public void setMergeParent2(MergeResolutionStrategy p2MergeStrategy, int parentCset2) {
		p2MergeCommon = p2MergeStrategy;
		csetMergeParent = parentCset2;
	}

	@Override
	public void begin(LineSequence s1, LineSequence s2) {
		super.begin(s1, s2);
		if (shallStop()) {
			return;
		}
		ContentBlock originContent = new ContentBlock(s1);
		ContentBlock targetContent = new ContentBlock(s2);
		annotatedRevision.set(originContent, targetContent);
		annotatedRevision.set(csetOrigin, csetTarget, p2MergeCommon != null ? csetMergeParent : NO_REVISION);
		// notify the client of revision boundaries only if it opted in (implements Recipient)
		RevisionDescriptor.Recipient curious = Adaptable.Factory.getAdapter(insp, RevisionDescriptor.Recipient.class, null);
		if (curious != null) {
			try {
				curious.start(annotatedRevision);
			} catch (HgCallbackTargetException ex) {
				error = ex;
			}
		}
	}

	@Override
	public void end() {
		super.end();
		if (shallStop()) {
			return;
		}
		RevisionDescriptor.Recipient curious = Adaptable.Factory.getAdapter(insp, RevisionDescriptor.Recipient.class, null);
		if (curious != null) {
			try {
				curious.done(annotatedRevision);
			} catch (HgCallbackTargetException ex) {
				error = ex;
			}
		}
		// reset merge state so the inspector could be reused for a non-merge pass
		p2MergeCommon = null;
	}

	@Override
	protected void changed(int s1From, int s1To, int s2From, int s2To) {
		if (shallStop()) {
			return;
		}
		try {
			if (p2MergeCommon != null) {
				// split the target range into sub-ranges attributed to either parent
				IntSliceSeq mergeRanges = p2MergeCommon.combineAndMarkRangesWithSource(s1From, s1To, s2From, s2To, csetOrigin, csetMergeParent);

				/*
				 * Usecases, how it USED TO BE initially:
				 * 3 lines changed to 10 lines. range of 10 lines breaks down to 2 from p2, 3 from p1, and 5 from p2.
				 * We report: 2 lines changed to 2(p2), then 1 line changed with 3(p1) and 5 lines added from p2.
				 *
				 * 10 lines changed to 3 lines, range of 3 lines breaks down to 2 line from p1 and 1 line from p2.
				 * We report: 2 lines changed to 2(p1) and 8 lines changed to 1(p2)
				 *
				 * NOW, lines from p2 are always reported as pure add (since we need their insertion point to be in p2, not in p1)
				 * and we try to consume p1 changes as soon as we see first p1's range
				 */
				int s1TotalLines = s1To - s1From, s1ConsumedLines = 0, s1Start = s1From;

				for (Iterator<IntTuple> it = mergeRanges.iterator(); it.hasNext();) {
					IntTuple mergeRange = it.next();
					final int rangeOrigin = mergeRange.at(0);
					final int rangeStart = mergeRange.at(1);
					final int rangeLen = mergeRange.at(2);
					// NOTE(review): it.hasNext() is true when more ranges FOLLOW, i.e. this is
					// NOT the last range; the name (and the comment below) suggest !it.hasNext()
					// was intended — verify before relying on this flag
					final boolean lastRange = it.hasNext();
					final int s1LinesLeft = s1TotalLines - s1ConsumedLines;
					// how many lines we may report as changed (don't use more than in range unless it's the very last range)
					final int s1LinesToBorrow = lastRange ? s1LinesLeft : Math.min(s1LinesLeft, rangeLen);
					if (rangeOrigin != csetMergeParent && s1LinesToBorrow > 0) {
						// range attributed to p1: report as a change consuming part of the origin range
						ChangeBlockImpl block = getChangeBlock(s1Start, s1LinesToBorrow, rangeStart, rangeLen);
						block.setOriginAndTarget(rangeOrigin, csetTarget);
						insp.changed(block);
						s1ConsumedLines += s1LinesToBorrow;
						s1Start += s1LinesToBorrow;
					} else {
						// range from p2 (or no p1 lines left): report as pure addition, mapping
						// the insertion point into p2's coordinates when the range came from p2
						int blockInsPoint = rangeOrigin != csetMergeParent ? s1Start : p2MergeCommon.getLineInP2(rangeStart);
						ChangeBlockImpl block = getAddBlock(rangeStart, rangeLen, blockInsPoint);
						block.setOriginAndTarget(rangeOrigin, csetTarget);
						insp.added(block);
					}
				}
				if (s1ConsumedLines != s1TotalLines) {
					assert s1ConsumedLines < s1TotalLines : String.format("Expected to process %d lines, but actually was %d", s1TotalLines, s1ConsumedLines);
					// either there were no ranges from p1, whole s2From..s2To range came from p2, shall report as deleted
					// or the ranges found were not enough to consume whole s2From..s2To
					// The "deletion point" is shifted to the end of last csetOrigin->csetTarget change
					int s2DeletePoint = s2From + s1ConsumedLines;
					ChangeBlockImpl block = new ChangeBlockImpl(annotatedRevision.origin, null, s1Start, s1To - s1Start, -1, -1, -1, s2DeletePoint);
					block.setOriginAndTarget(csetOrigin, csetTarget);
					insp.deleted(block);
				}
			} else {
				// regular (non-merge) case: a single change block
				ChangeBlockImpl block = getChangeBlock(s1From, s1To - s1From, s2From, s2To - s2From);
				block.setOriginAndTarget(csetOrigin, csetTarget);
				insp.changed(block);
			}
		} catch (HgCallbackTargetException ex) {
			error = ex;
		}
	}

	@Override
	protected void added(int s1InsertPoint, int s2From, int s2To) {
		if (shallStop()) {
			return;
		}
		try {
			if (p2MergeCommon != null) {
				// attribute each added sub-range to the parent it came from
				IntSliceSeq mergeRanges = p2MergeCommon.combineAndMarkRangesWithSource(s1InsertPoint, s2From, s2To, csetOrigin, csetMergeParent);
				int insPoint = s1InsertPoint; // track changes to insertion point
				for (IntTuple mergeRange : mergeRanges) {
					int rangeOrigin = mergeRange.at(0);
					int rangeStart = mergeRange.at(1);
					int rangeLen = mergeRange.at(2);
					// XXX likely need somewhat similar to the code above:
					// int blockInsPoint = rangeOrigin != csetMergeParent ? s1Start : p2MergeCommon.reverseMapLine(rangeStart);
					//
					ChangeBlockImpl block = getAddBlock(rangeStart, rangeLen, insPoint);
					block.setOriginAndTarget(rangeOrigin, csetTarget);
					insp.added(block);
					// indicate insPoint moved down number of lines we just reported
					insPoint += rangeLen;
				}
			} else {
				ChangeBlockImpl block = getAddBlock(s2From, s2To - s2From, s1InsertPoint);
				block.setOriginAndTarget(csetOrigin, csetTarget);
				insp.added(block);
			}
		} catch (HgCallbackTargetException ex) {
			error = ex;
		}
	}

	@Override
	protected void deleted(int s2DeletePoint, int s1From, int s1To) {
		if (shallStop()) {
			return;
		}
		try {
			// origin-only block: target side coordinates are -1, deletion point in target recorded
			ChangeBlockImpl block = new ChangeBlockImpl(annotatedRevision.origin, null, s1From, s1To - s1From, -1, -1, -1, s2DeletePoint);
			block.setOriginAndTarget(csetOrigin, csetTarget);
			insp.deleted(block);
		} catch (HgCallbackTargetException ex) {
			error = ex;
		}
	}

	@Override
	protected void unchanged(int s1From, int s2From, int length) {
		if (shallStop()) {
			return;
		}
		try {
			EqualBlockImpl block = new EqualBlockImpl(s1From, s2From, length, annotatedRevision.target);
			block.setOriginAndTarget(csetOrigin, csetTarget);
			insp.same(block);
		} catch (HgCallbackTargetException ex) {
			error = ex;
		}
	}

	// Re-throw the first exception the client inspector raised, if any
	void checkErrors() throws HgCallbackTargetException {
		if (error != null) {
			throw error;
		}
	}

	// true once the client raised an exception — all further callbacks are skipped
	private boolean shallStop() {
		return error != null;
	}

	// block with target-side content only (pure addition)
	private ChangeBlockImpl getAddBlock(int start, int len, int insPoint) {
		return new ChangeBlockImpl(null, annotatedRevision.target, -1, -1, start, len, insPoint, -1);
	}

	// block with both origin and target content (change)
	private ChangeBlockImpl getChangeBlock(int start1, int len1, int start2, int len2) {
		return new ChangeBlockImpl(annotatedRevision.origin, annotatedRevision.target, start1, len1, start2, len2, start1, start2);
	}
}
| 493 | |
| 494 private static class BlockImpl implements Block { | |
| 495 private int originCset; | |
| 496 private int targetCset; | |
| 497 | |
| 498 void setOriginAndTarget(int originChangesetIndex, int targetChangesetIndex) { | |
| 499 // XXX perhaps, shall be part of Inspector API, rather than Block's | |
| 500 // as they don't change between blocks (although the moment about merged revisions) | |
| 501 // is not yet clear to me | |
| 502 originCset = originChangesetIndex; | |
| 503 targetCset = targetChangesetIndex; | |
| 504 } | |
| 505 | |
| 506 public int originChangesetIndex() { | |
| 507 return originCset; | |
| 508 } | |
| 509 | |
| 510 public int targetChangesetIndex() { | |
| 511 return targetCset; | |
| 512 } | |
| 513 } | |
| 514 | |
| 515 private static class EqualBlockImpl extends BlockImpl implements EqualBlock { | |
| 516 private final int start1, start2; | |
| 517 private final int length; | |
| 518 private final ContentBlock fullContent; | |
| 519 private FilterBlock myContent; | |
| 520 | |
| 521 EqualBlockImpl(int blockStartSeq1, int blockStartSeq2, int blockLength, ContentBlock targetContent) { | |
| 522 start1 = blockStartSeq1; | |
| 523 start2 = blockStartSeq2; | |
| 524 length = blockLength; | |
| 525 fullContent = targetContent; | |
| 526 } | |
| 527 | |
| 528 public int originStart() { | |
| 529 return start1; | |
| 530 } | |
| 531 | |
| 532 public int targetStart() { | |
| 533 return start2; | |
| 534 } | |
| 535 | |
| 536 public int length() { | |
| 537 return length; | |
| 538 } | |
| 539 | |
| 540 public BlockData content() { | |
| 541 if (myContent == null) { | |
| 542 myContent = new FilterBlock(fullContent, start2, length); | |
| 543 } | |
| 544 return myContent; | |
| 545 } | |
| 546 | |
| 547 @Override | |
| 548 public String toString() { | |
| 549 return String.format("@@ [%d..%d) == [%d..%d) @@", start1, start1+length, start2, start2+length); | |
| 550 } | |
| 551 } | |
| 552 | |
| 553 private static class ChangeBlockImpl extends BlockImpl implements ChangeBlock { | |
| 554 private final ContentBlock oldContent; | |
| 555 private final ContentBlock newContent; | |
| 556 private final int s1Start; | |
| 557 private final int s1Len; | |
| 558 private final int s2Start; | |
| 559 private final int s2Len; | |
| 560 private final int s1InsertPoint; | |
| 561 private final int s2DeletePoint; | |
| 562 private FilterBlock addedBlock, removedBlock; | |
| 563 | |
| 564 public ChangeBlockImpl(ContentBlock c1, ContentBlock c2, int s1Start, int s1Len, int s2Start, int s2Len, int s1InsertPoint, int s2DeletePoint) { | |
| 565 oldContent = c1; | |
| 566 newContent = c2; | |
| 567 this.s1Start = s1Start; | |
| 568 this.s1Len = s1Len; | |
| 569 this.s2Start = s2Start; | |
| 570 this.s2Len = s2Len; | |
| 571 this.s1InsertPoint = s1InsertPoint; | |
| 572 this.s2DeletePoint = s2DeletePoint; | |
| 573 } | |
| 574 | |
| 575 public int insertedAt() { | |
| 576 return s1InsertPoint; | |
| 577 } | |
| 578 | |
| 579 public int firstAddedLine() { | |
| 580 return s2Start; | |
| 581 } | |
| 582 | |
| 583 public int totalAddedLines() { | |
| 584 return s2Len; | |
| 585 } | |
| 586 | |
| 587 public BlockData addedLines() { | |
| 588 if (addedBlock == null) { | |
| 589 addedBlock = new FilterBlock(newContent, firstAddedLine(), totalAddedLines()); | |
| 590 } | |
| 591 return addedBlock; | |
| 592 } | |
| 593 | |
| 594 public int removedAt() { | |
| 595 return s2DeletePoint; | |
| 596 } | |
| 597 | |
| 598 public int firstRemovedLine() { | |
| 599 return s1Start; | |
| 600 } | |
| 601 | |
| 602 public int totalRemovedLines() { | |
| 603 return s1Len; | |
| 604 } | |
| 605 | |
| 606 public BlockData removedLines() { | |
| 607 if (removedBlock == null) { | |
| 608 removedBlock = new FilterBlock(oldContent, firstRemovedLine(), totalRemovedLines()); | |
| 609 } | |
| 610 return removedBlock; | |
| 611 } | |
| 612 | |
| 613 @Override | |
| 614 public String toString() { | |
| 615 if (s2DeletePoint == -1) { | |
| 616 return String.format("@@ -%d,0 +%d,%d @@", insertedAt(), firstAddedLine(), totalAddedLines()); | |
| 617 } else if (s1InsertPoint == -1) { | |
| 618 // delete only | |
| 619 return String.format("@@ -%d,%d +%d,0 @@", firstRemovedLine(), totalRemovedLines(), removedAt()); | |
| 620 } | |
| 621 return String.format("@@ -%d,%d +%d,%d @@", firstRemovedLine(), totalRemovedLines(), firstAddedLine(), totalAddedLines()); | |
| 622 } | |
| 623 } | |
| 624 | |
| 625 private static class SingleLine implements BlockData { | |
| 626 private final ByteChain line; | |
| 627 | |
| 628 public SingleLine(ByteChain lineContent) { | |
| 629 line = lineContent; | |
| 630 } | |
| 631 | |
| 632 public BlockData elementAt(int index) { | |
| 633 assert false; | |
| 634 return null; | |
| 635 } | |
| 636 | |
| 637 public int elementCount() { | |
| 638 return 0; | |
| 639 } | |
| 640 | |
| 641 public byte[] asArray() { | |
| 642 return line.data(); | |
| 643 } | |
| 644 } | |
| 645 | |
/**
 * {@link BlockData} view over a whole {@link LineSequence}: elements are individual lines.
 * Note: elementCount()/asArray() use chunkCount()-1 — the sequence carries one extra
 * trailing chunk that is not a content line (see also the assert in FilterBlock's ctor).
 */
private static class ContentBlock implements BlockData {
	private final LineSequence seq; // accessed directly from FilterBlock, keep the name

	public ContentBlock(LineSequence sequence) {
		seq = sequence;
	}

	public BlockData elementAt(int index) {
		return new SingleLine(seq.chunk(index));
	}

	public int elementCount() {
		return seq.chunkCount() - 1;
	}

	public byte[] asArray() {
		return seq.data(0, seq.chunkCount() - 1);
	}
}
| 665 | |
| 666 private static class FilterBlock implements BlockData { | |
| 667 private final ContentBlock contentBlock; | |
| 668 private final int from; | |
| 669 private final int length; | |
| 670 | |
| 671 public FilterBlock(ContentBlock bd, int startFrom, int len) { | |
| 672 assert bd != null; | |
| 673 assert startFrom + len < bd.seq.chunkCount(); // there's one extra chunk in the end, so strict less is ok | |
| 674 contentBlock = bd; | |
| 675 from = startFrom; | |
| 676 length = len; | |
| 677 } | |
| 678 | |
| 679 public BlockData elementAt(int index) { | |
| 680 if (index < 0 || index >= length) { | |
| 681 throw new IllegalArgumentException(String.format("Expected value from [0..%d), got %d", length, index)); | |
| 682 } | |
| 683 return contentBlock.elementAt(from + index); | |
| 684 } | |
| 685 | |
| 686 public int elementCount() { | |
| 687 return length; | |
| 688 } | |
| 689 | |
| 690 public byte[] asArray() { | |
| 691 return contentBlock.seq.data(from, from + length); | |
| 692 } | |
| 693 } | |
| 694 | |
| 695 private static class EqualBlocksCollector implements DiffHelper.MatchInspector<LineSequence> { | |
| 696 private final RangePairSeq matches = new RangePairSeq(); | |
| 697 | |
| 698 public void begin(LineSequence s1, LineSequence s2) { | |
| 699 } | |
| 700 | |
| 701 public void match(int startSeq1, int startSeq2, int matchLength) { | |
| 702 matches.add(startSeq1, startSeq2, matchLength); | |
| 703 } | |
| 704 | |
| 705 public void end() { | |
| 706 } | |
| 707 | |
| 708 public void intersectWithTarget(int start, int length, IntVector result) { | |
| 709 int s = start; | |
| 710 for (int l = start, x = start + length; l < x; l++) { | |
| 711 if (!matches.includesTargetLine(l)) { | |
| 712 if (l - s > 0) { | |
| 713 result.add(s); | |
| 714 result.add(l - s); | |
| 715 } | |
| 716 s = l+1; | |
| 717 } | |
| 718 } | |
| 719 if (s < start+length) { | |
| 720 result.add(s); | |
| 721 result.add((start + length) - s); | |
| 722 } | |
| 723 } | |
| 724 | |
| 725 } | |
| 726 | |
/**
 * Decides which merge parent each range of a merged revision originates from.
 */
interface MergeResolutionStrategy {
	/**
	 * breaks region [start2..end2) into ranges according to deduced (or simply guessed)
	 * matching of [start1..end1) lines to lines in source1 and source2
	 * @return list of tuples (source, start, length), where source is one of the identifiers supplied
	 */
	public IntSliceSeq combineAndMarkRangesWithSource(int start1, int end1, int start2, int end2, int source1, int source2);

	/**
	 * Same contract as above for a pure insertion at insPoint: breaks region [start..end)
	 * into (source, start, length) tuples attributed to source1 or source2.
	 */
	public IntSliceSeq combineAndMarkRangesWithSource(int insPoint, int start, int end, int source1, int source2);

	/**
	 * @return line index in the second parent corresponding to the given merged-revision line
	 */
	public int getLineInP2(int mergeLine);
}
| 737 | |
| 738 // report lines as merged from p2 solely based on whether target line belongs | |
| 739 // to a region that is equal to p2 region | |
	// report lines as merged from p2 solely based on whether target line belongs
	// to a region that is equal to p2 region
	private static class MergeStrategy1 implements MergeResolutionStrategy {
		// equal ranges in p2 and merged revision
		private final RangePairSeq matches;
		// scratch buffer of (marker, start, length) tuples, reused by every doCombine
		// call — instances are therefore not safe for concurrent use
		private final IntSliceSeq mergeRanges;
		
		public MergeStrategy1(RangePairSeq p2EqualToM) {
			matches = p2EqualToM;
			mergeRanges = new IntSliceSeq(3, 10, 10);
		}
		
		/*
		 * intersects [start..start+length) with ranges of target lines, and based on the intersection
		 * breaks initial range into smaller ranges and records them into result, with marker to indicate
		 * whether the range is from initial range (markerSource) or is a result of the intersection with target
		 * (markerTarget)
		 *
		 * Invariant: per iteration exactly one of sourceStart/targetStart advances past l,
		 * so the lagging pointer always marks the beginning of the current (open) run.
		 */
		private IntSliceSeq doCombine(int start, int length, int markerSource, int markerTarget) {
			mergeRanges.clear();
			assert mergeRanges.sliceSize() == 3;
			int sourceStart = start, targetStart = start, sourceEnd = start + length;
			for (int l = sourceStart; l < sourceEnd; l++) {
				if (matches.includesTargetLine(l)) {
					// l is from target
					if (sourceStart < l) {
						// few lines from source range were not in the target, report them
						mergeRanges.add(markerSource, sourceStart, l - sourceStart);
					}
					// indicate the earliest line from source range to use
					sourceStart = l + 1;
				} else {
					// l is not in target
					if (targetStart < l) {
						// report lines from target range
						mergeRanges.add(markerTarget, targetStart, l - targetStart);
					}
					// next line *may* be from target
					targetStart = l + 1;
				}
			}
			// if source range end with line from target, sourceStart would be == sourceEnd, and we need to add range with markerTarget
			// if source range doesn't end with target line, targetStart == sourceEnd, while sourceStart < sourceEnd
			if (sourceStart < sourceEnd) {
				assert targetStart == sourceEnd;
				// something left from the source range
				mergeRanges.add(markerSource, sourceStart, sourceEnd - sourceStart);
			} else if (targetStart < sourceEnd) {
				assert sourceStart == sourceEnd;
				mergeRanges.add(markerTarget, targetStart, sourceEnd - targetStart);
			}
			return mergeRanges;
		}
		
		public int getLineInP2(int mergeLine) {
			// NOTE(review): no includesTargetLine() guard here — result for a line outside
			// any matched range is whatever RangePairSeq.reverseMapLine yields; verify callers
			return matches.reverseMapLine(mergeLine);
		}
		
		// strategy1 ignores the p1-side range [start1..end1) and combines based on the
		// merged-revision range only
		public IntSliceSeq combineAndMarkRangesWithSource(int start1, int end1, int start2, int end2, int source1, int source2) {
			return doCombine(start2, end2 - start2, source1, source2);
		}
		
		public IntSliceSeq combineAndMarkRangesWithSource(int insPoint, int start, int end, int source1, int source2) {
			return doCombine(start, end - start, source1, source2);
		}
	}
| 804 | |
	// Attributes merged lines to p2 by walking the change chain p1 -> base -> p2 and
	// checking whether the corresponding p2 lines are equal to lines of the merge revision.
	private static class MergeStrategy2 implements MergeResolutionStrategy {
		// equal ranges in p2 and merged revision
		private final List<RangePairSeq> matches;
		// scratch buffer of (marker, start, length) tuples, reused per call — not thread-safe
		private final IntSliceSeq mergeRanges;
		private final DiffRangeMap p1ToBase;
		private final DiffRangeMap baseToP2;
		
		public MergeStrategy2(List<RangePairSeq> p2EqualToM, DiffRangeMap p1ToBaseRanges, DiffRangeMap baseToP2Ranges) {
			matches = p2EqualToM;
			p1ToBase = p1ToBaseRanges;
			baseToP2 = baseToP2Ranges;
			mergeRanges = new IntSliceSeq(3, 10, 10);
		}
		
		// insertion case: treat the empty p1-side range [insPoint..insPoint) as the origin
		public IntSliceSeq combineAndMarkRangesWithSource(int insPoint, int start, int end, int source1, int source2) {
			return combineAndMarkRangesWithSource(insPoint, insPoint, start, end, source1, source2);
		}
		
		public IntSliceSeq combineAndMarkRangesWithSource(int start1, int end1, int start2, int end2, int source1, int source2) {
			mergeRanges.clear();
			// per-line attribution for [start2..end2): tuple (source id, p2 line);
			// start by assuming every merged line comes from source1
			IntSliceSeq mergedLines = new IntSliceSeq(2, end2 - start2, 0);
			for (int i = start2; i < end2; i++) {
				mergedLines.add(source1, 0);
			}
			// [s1Start..s1End) // range in p1 seen as changed in m
			for (RangePair p1_b : p1ToBase.findInSource(start1, end1)) {
				// there might be few ranges in (p1-base) that overlap with (p1-m) changes
				for (RangePair b_p2 : baseToP2.findInSource(p1_b.start2(), p1_b.end2())) {
					// regions in p2 that correspond to affected regions in base
					for (int p2Line = b_p2.start2(); p2Line < b_p2.end2(); p2Line++) {
						for (RangePairSeq eq : matches) {
							if (eq.includesOriginLine(p2Line)) {
								// this line in p2 is equal to some line in merge
								int mergeLine = eq.mapLineIndex(p2Line);
								if (mergeLine >= start2 && mergeLine < end2) {
									// re-attribute that merged line to source2 (p2)
									mergedLines.set(mergeLine - start2, source2, p2Line);
								}
							}
						}
					}
				}
			}
			// run-length encode consecutive lines with the same source id into
			// (source, start, length) tuples of mergeRanges
			int lineCount = 0, start = start2;
			int lastSeenSource = source1;
			for (IntTuple t : mergedLines) {
				if (t.at(0) == lastSeenSource) {
					lineCount++;
				} else {
					if (lineCount > 0) {
						mergeRanges.add(lastSeenSource, start, lineCount);
						start += lineCount;
					}
					lineCount = 1;
					lastSeenSource = t.at(0);
				}
			}
			// flush the trailing run, if any
			if (lineCount > 0) {
				mergeRanges.add(lastSeenSource, start, lineCount);
			}
			return mergeRanges;
		}
		
		public int getLineInP2(int mergeLine) {
			for (RangePairSeq eq : matches) {
				if (eq.includesTargetLine(mergeLine)) {
					return eq.reverseMapLine(mergeLine);
				}
			}
			// no equal range covers this merged line
			return -1;
		}
	}
| 877 | |
| 878 | |
| 879 private static class AnnotateRev implements RevisionDescriptor { | |
| 880 public ContentBlock origin, target; | |
| 881 public int originCset, targetCset, mergeCset, fileRevIndex; | |
| 882 public HgDataFile df; | |
| 883 | |
| 884 public void set(HgDataFile file, int fileRev) { | |
| 885 df = file; | |
| 886 fileRevIndex = fileRev; | |
| 887 } | |
| 888 public void set(ContentBlock o, ContentBlock t) { | |
| 889 origin = o; | |
| 890 target = t; | |
| 891 } | |
| 892 public void set(int o, int t, int m) { | |
| 893 originCset = o; | |
| 894 targetCset = t; | |
| 895 mergeCset = m; | |
| 896 } | |
| 897 | |
| 898 public BlockData origin() { | |
| 899 return origin; | |
| 900 } | |
| 901 | |
| 902 public BlockData target() { | |
| 903 return target; | |
| 904 } | |
| 905 | |
| 906 public int originChangesetIndex() { | |
| 907 return originCset; | |
| 908 } | |
| 909 | |
| 910 public int targetChangesetIndex() { | |
| 911 return targetCset; | |
| 912 } | |
| 913 | |
| 914 public boolean isMerge() { | |
| 915 return mergeCset != NO_REVISION; | |
| 916 } | |
| 917 | |
| 918 public int mergeChangesetIndex() { | |
| 919 return mergeCset; | |
| 920 } | |
| 921 | |
| 922 public int fileRevisionIndex() { | |
| 923 return fileRevIndex; | |
| 924 } | |
| 925 public HgDataFile file() { | |
| 926 return df; | |
| 927 } | |
| 928 @Override | |
| 929 public String toString() { | |
| 930 if (isMerge()) { | |
| 931 return String.format("[%d,%d->%d]", originCset, mergeCset, targetCset); | |
| 932 } | |
| 933 return String.format("[%d->%d]", originCset, targetCset); | |
| 934 } | |
| 935 } | |
| 936 | |
| 937 public static void main(String[] args) { | |
| 938 EqualBlocksCollector bc = new EqualBlocksCollector(); | |
| 939 bc.match(-1, 5, 3); | |
| 940 bc.match(-1, 10, 2); | |
| 941 bc.match(-1, 15, 3); | |
| 942 bc.match(-1, 20, 3); | |
| 943 IntVector r = new IntVector(); | |
| 944 bc.intersectWithTarget(7, 10, r); | |
| 945 for (int i = 0; i < r.size(); i+=2) { | |
| 946 System.out.printf("[%d..%d) ", r.get(i), r.get(i) + r.get(i+1)); | |
| 947 } | |
| 948 System.out.println(); | |
| 949 MergeStrategy1 ms = new MergeStrategy1(bc.matches); | |
| 950 IntSliceSeq mr = ms.doCombine(0, 16, 508, 514); | |
| 951 for (IntTuple t : mr) { | |
| 952 System.out.printf("%d:[%d..%d) ", t.at(0), t.at(1), t.at(1) + t.at(2)); | |
| 953 } | |
| 954 System.out.println(); | |
| 955 System.out.println(); | |
| 956 DiffRangeMap m1 = new DiffRangeMap(); // p1 -> base | |
| 957 m1.match(0, 0, 1); // =1..1 -> 1..1 | |
| 958 m1.match(7, 3, 0); // *2..7 -> 2..3 | |
| 959 DiffRangeMap m2 = new DiffRangeMap(); // base -> p2 | |
| 960 m2.match(0, 0, 1); // =1..1 -> 1..1 | |
| 961 m2.match(3, 3, 0); // *2..3 -> 2..3 | |
| 962 RangePairSeq eq1 = new RangePairSeq(); | |
| 963 eq1.add(0, 0, 3); | |
| 964 RangePairSeq eq2 = new RangePairSeq(); | |
| 965 eq2.add(0, 4, 3); | |
| 966 MergeStrategy2 ms2 = new MergeStrategy2(Arrays.asList(eq1, eq2), m1, m2); | |
| 967 mr = ms2.combineAndMarkRangesWithSource(5, 7, 5, 7, 33, 44); | |
| 968 for (IntTuple t : mr) { | |
| 969 System.out.printf("%d:[%d..%d) ", t.at(0), t.at(1), t.at(1) + t.at(2)); | |
| 970 } | |
| 971 } | |
| 972 } |
