diff --git a/README.md b/README.md index 74700fb..278eb4a 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# SOS v1.5.3 # +# SOS v1.5.4 # [![Travis badge](https://travis-ci.org/ArneBachmann/sos.svg?branch=master)](https://travis-ci.org/ArneBachmann/sos) [![Build status](https://ci.appveyor.com/api/projects/status/fe915rtx02buqe4r?svg=true)](https://ci.appveyor.com/project/ArneBachmann/sos) diff --git a/setup.py b/setup.py index 3a22e8f..059dcf3 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ import os, shutil, subprocess, sys, time, unittest from setuptools import setup, find_packages -RELEASE = "1.5.3" +RELEASE = "1.5.4" COMPATIBILITY_LEVEL = "3.4" print("sys.argv is %r" % sys.argv) diff --git a/sos/sos.coco b/sos/sos.coco index 987b8ca..a626ed7 100644 --- a/sos/sos.coco +++ b/sos/sos.coco @@ -21,8 +21,8 @@ except: import configr -# Lazy imports for quicker initialization -shutil:Any +# Lazy module auto-import for quick initialization +shutil:Any? class shutil: def __getattribute__(_, key): global shutil import shutil # overrides global reference @@ -170,7 +170,7 @@ class Metadata: if verbose: info("Duplicating branch '%s' to '%s'..." % (_.branches[_.branch].name ?? ("b%d" % _.branch), (name ?? "b%d" % branch))) now:int = int(time.time() * 1000) _.loadBranch(_.branch) # load commits for current (originating) branch - revision:int = max(_.commits) + revision:int = max(_.commits) if _.commits else 0 _.commits.clear() newBranch:BranchInfo = dataCopy(BranchInfo, _.branches[_.branch], number = branch, ctime = now, name = name ?? "Branched from '%s'" % (_.branches[_.branch].name ?? "b%d" % _.branch), @@ -208,7 +208,7 @@ class Metadata: os.makedirs(encode(revisionFolder(branch, 0, base = _.root))) if _.branch is not None: # not immediately after "offline" - copy files from current branch _.loadBranch(_.branch) - revision:int = max(_.commits) # TODO what if last switch was to an earlier revision? 
no persisting of last checkout + revision:int = max(_.commits) if _.commits else 0 # TODO what if last switch was to an earlier revision? no persisting of last checkout _.computeSequentialPathSet(_.branch, revision) # full set of files in revision to _.paths for path, pinfo in _.paths.items(): _.copyVersionedFile(_.branch, revision, branch, 0, pinfo) @@ -218,30 +218,42 @@ class Metadata: _.branches[branch] = BranchInfo(branch, _.commits[0].ctime, name, True if len(_.branches) == 0 else _.branches[_.branch].inSync, tracked, untracked) # save branch info, in case it is needed def removeBranch(_, branch:int) -> BranchInfo = - ''' Entirely remove a branch and all its revisions from the file system. ''' - binfo:BranchInfo - deps:List[Tuple[int,int]] = [(binfo.number, binfo.revision) for binfo in _.branches.values() if binfo.parent is not None and _.getParentBranch(binfo.number, 0) == branch] # get transitively depending branches - if deps: # need to copy all parent revisions to dependet branches first - minrev:int = min([e[1] for e in deps]) # minimum revision ever branched from parent (ignoring transitive branching!) + ''' Entirely remove a branch and all its revisions from the file system. + We currently implement a simplified logic that fully re-creates all revisions for all transitively depending branches instead of only removing the one parent branch. 
+ ''' + import collections # used almost only here + binfo:BranchInfo # typing info + deps:List[Tuple[int,int]] = [(binfo.number, binfo.revision) for binfo in _.branches.values() if binfo.parent is not None and branch in _.getParentBranches(binfo.number, 0)] # all transitively depending branches + newcommits:Dict[int,Dict[int,CommitInfo]] = collections.defaultdict(dict) # gathers commit info of re-created branches (branch -> revision -> info) + if deps: # need to copy all parent revisions to dependent branches first + minrev:int = min(e[1] for e in deps) # minimum revision ever branched from parent: up to this revision we can simply copy them to all dependant branches progress:ProgressIndicator = ProgressIndicator(PROGRESS_MARKER[1 if _.c.useUnicodeFont else 0]) - for rev in range(0, minrev + 1): # rely on caching by copying revision-wise as long as needed in all depending branches - for dep, _rev in deps: if rev <= _rev: - printo("\rIntegrating revision %02d into dependant branch %02d %s" % (rev, dep, progress.getIndicator())) - shutil.copytree(encode(revisionFolder(branch, rev, base = _.root)), encode(revisionFolder(dep, rev, base = _.root))) # folder would not exist yet - for dep, _rev in deps: # copy remaining revisions per branch + for rev in range(0, minrev + 1): # rely on caching by copying revision-wise as long as needed into all depending branches + for dep, _rev in deps: + printo("\rIntegrating revision %02d into dependant branch %02d %s" % (rev, dep, progress.getIndicator())) # TODO align placement of indicator with other uses of progress + _.loadBranch(_.getParentBranch(branch, rev)) # load commits and set _.branch (in case branch to remove was also fast-branched) +# if rev in _.commits: # TODO uncomment? - if not, it was an empty commit? because on non-commit branches there's no revision 0? 
+ newcommits[dep][rev] = _.commits[rev] + shutil.copytree(encode(revisionFolder(_.branch, rev, base = _.root)), encode(revisionFolder(dep, rev, base = _.root))) + for dep, _rev in deps: # copy remaining revisions by branch instead of by revision for rev in range(minrev + 1, _rev + 1): printo("\rIntegrating revision %02d into dependant branch %02d %s" % (rev, dep, progress.getIndicator())) - shutil.copytree(encode(revisionFolder(_.getParentBranch(dep, rev), rev, base = _.root)), encode(revisionFolder(dep, rev, base = _.root))) - _.branches[dep] = dataCopy(BranchInfo, _.branches[dep], parent = None, revision = None) # remove reference information - printo(pure.ljust() + "\r") + _.loadBranch(_.getParentBranch(dep, rev)) # WARN using dep instead of branch here! + if rev in _.commits: # false only if no added or modified files during fast-branch? + newcommits[dep][rev] = _.commits[rev] + shutil.copytree(encode(revisionFolder(_.branch, rev, base = _.root)), encode(revisionFolder(dep, rev, base = _.root))) + _.branches[dep] = dataCopy(BranchInfo, _.branches[dep], parent = None, revision = None) # delete fast-branching reference information + printo(pure.ljust() + "\r") # clean line output tryOrIgnore(() -> shutil.rmtree(encode(branchFolder(branch) + BACKUP_SUFFIX))) # remove previous backup first - try: os.rename(encode(branchFolder(branch)), encode(branchFolder(branch) + BACKUP_SUFFIX)) - except: Exit("Cannot rename branch metadata to prepare removal. Are there locked or open files?") - binfo = _.branches[branch] # keep reference for caller + tryOrIgnore(() -> os.rename(encode(branchFolder(branch)), encode(branchFolder(branch) + BACKUP_SUFFIX)), (E) -> Exit("Cannot rename branch metadata to prepare removal. 
Are there locked or open files?")) + binfo = _.branches[branch] # keep reference to removed branch info for caller del _.branches[branch] - _.branch = max(_.branches) # switch to another valid branch - _.saveBranches() - _.commits.clear() + _.branch = (branch + 1) if (branch + 1) in _.branches else max(_.branches) # switch to another valid branch + _.saveBranches() # persist modified branches list + for branch, commits in newcommits.items(): # now store aggregated commit infos + _.commits = commits + _.saveBranch(branch) + _.commits.clear() # clean memory binfo def loadCommit(_, branch:int, revision:int): @@ -343,13 +355,14 @@ class Metadata: ''' Returns nothing, just updates _.paths in place. ''' next(_.computeSequentialPathSetIterator(branch, revision, incrementally = False)) # simply invoke the generator once to get full results - def computeSequentialPathSetIterator(_, branch:int, revision:int, incrementally:bool = True) -> Iterator[Dict[str,PathInfo]]?: + def computeSequentialPathSetIterator(_, branch:int, revision:int, incrementally:bool = True, startwith:int = 0) -> Iterator[Dict[str,PathInfo]]?: ''' In-memory computation of current list of valid PathInfo entries for specified branch and through specified revision. 
''' - _.loadCommit(branch, 0) # load initial paths + try: _.loadCommit(branch, startwith) # load initial paths + except: yield {}; return None # no revisions if incrementally: yield _.paths m:Metadata = Metadata(_.root); rev:int # next changes TODO avoid loading all metadata and config - for rev in range(1, revision + 1): - m.loadCommit(_.getParentBranch(branch, rev), rev) + for rev in range(startwith + 1, revision + 1): + m.loadCommit(branch, rev) for p, info in m.paths.items(): if info.size == None: del _.paths[p] else: _.paths[p] = info @@ -391,12 +404,14 @@ class Metadata: if revision < 0: Exit("Cannot determine versioned file '%s' from specified branch '%d'" % (nameHash, branch)) revision, source - def getParentBranch(_, branch:int, revision:int) -> int = + def getParentBranches(_, branch:int, revision:int) -> List[int] = ''' Determine originating branch for a (potentially branched) revision, traversing all branch parents until found. ''' - other:int? = _.branches[branch].parent # reference to originating parent branch, or None - if other is None or revision > _.branches[branch].revision: return branch # need to load commit from other branch instead - while _.branches[other].parent is not None and revision <= _.branches[other].revision: other = _.branches[other].parent - other + others:List[int] = [_.branches[branch].parent] # reference to originating parent branch, or None + if others[0] is None or revision > _.branches[branch].revision: return [branch] # found. need to load commit from other branch instead + while _.branches[others[-1]].parent is not None and revision <= _.branches[others[-1]].revision: others.append(_.branches[others[-1]].parent) # find true original branch for revision + others + + def getParentBranch(_, branch:int, revision:int) -> int = _.getParentBranches(branch, revision)[-1] def getHighestRevision(_, branch:int) -> int? = ''' Find highest revision of a branch, even if current branch has no commits. 
''' @@ -624,8 +639,8 @@ def status(argument:str? = None, vcs:str? = None, cmd:str? = None, options:str[] trackingPatterns:FrozenSet[str] = m.getTrackingPatterns() untrackingPatterns:FrozenSet[str] = m.getTrackingPatterns(negative = True) m.loadBranch(current) - maxi:int = max(m.commits) if m.commits else m.branches[m.branch].revision - m.computeSequentialPathSet(current, maxi) # load all commits up to specified revision # line 508 + maxi:int? = max(m.commits) if m.commits else m.branches[m.branch].revision + if maxi is not None: m.computeSequentialPathSet(current, maxi) # load all commits up to specified revision, except no commits changed, _msg = m.findChanges( checkContent = strict, considerOnly = onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingPatterns), @@ -646,7 +661,7 @@ def status(argument:str? = None, vcs:str? = None, cmd:str? = None, options:str[] m.loadCommit(m.branch, commit_) for pinfo in m.paths.values(): original += pinfo.size ?? 0 maxi = max(m.commits) if m.commits else m.branches[branch.number].revision - printo(" %s b%d%s @%s (%s) with %d commits, using %.2f MiB (+%.3f%% SOS overhead%s)%s" % ("*" if current == branch.number else " ", branch.number, ((" %%%ds" % (sl + 2)) % ("'%s'" % branch.name)) if branch.name else "", strftime(branch.ctime), "in sync" if branch.inSync else "modified", len(m.commits), pl_amount + oh_amount, oh_amount * 100. / (pl_amount + oh_amount), ", %s compression/deduplication" % (("%.2f%s" % (float(original) / float(payload), MULT_SYMBOL if m.c.useUnicodeFont else "x")) if payload > 0 else "full") if m.compress or (len(m.commits) > 0 and len(m.commits) != max(m.commits) + 1) else "", (". 
Last comment: '%s'" % m.commits[maxi].message) if maxi in m.commits and m.commits[maxi].message else "")) + printo(" %s b%d%s @%s (%s) with %d commits, using %.2f MiB (+%.3f%% SOS overhead%s)%s" % ("*" if current == branch.number else " ", branch.number, ((" %%%ds" % (sl + 2)) % (("'%s'" % branch.name) if branch.name else "")), strftime(branch.ctime), "in sync" if branch.inSync else "modified", len(m.commits), pl_amount + oh_amount, oh_amount * 100. / (pl_amount + oh_amount), ", %s compression/deduplication" % (("%.2f%s" % (float(original) / float(payload), MULT_SYMBOL if m.c.useUnicodeFont else "x")) if payload > 0 else "full") if m.compress or (len(m.commits) > 0 and len(m.commits) != max(m.commits) + 1) else "", (". Last comment: '%s'" % m.commits[maxi].message) if maxi in m.commits and m.commits[maxi].message else "")) if m.track or m.picky and (len(m.branches[m.branch].tracked) > 0 or len(m.branches[m.branch].untracked) > 0): info("\nTracked file patterns:") # TODO print matching untracking patterns side-by-side printo(ajoin(" | ", m.branches[m.branch].tracked, "\n")) @@ -916,7 +931,7 @@ def log(options:str[] = []): _mod:FrozenSet[str] = frozenset([_ for _, info in {k: nxts[k] for k in news - _add - _del}.items() if last[_].size != info.size or (last[_].hash != info.hash if m.strict else last[_].mtime != info.mtime)]) # _mov:FrozenSet[str] = detectMoves(ChangeSet(nxts, {o: None for o in olds}) # TODO determine moves - can we reuse detectMoves(changes)? _txt:int = len([m_ for m_ in _mod if m.isTextType(m_)]) - printo(" %s r%s @%s (+%02d/-%02d/%s%02d/T%02d) |%s|%s" % ("*" if commit.number == maxi else " ", ("%%%ds" % nl) % commit.number, strftime(commit.ctime), len(_add), len(_del), PLUSMINUS_SYMBOL if m.c.useUnicodeFont else "~", len(_mod), _txt, (commit.message ?? ""), "TAG" if (commit.message ?? 
"") in m.tags else "")) + printo(" %s r%s @%s (+%02d/-%02d/%s%02d/T%02d) |%s|%s" % ((ARROW_SYMBOL if m.c.useUnicodeFont else "*") if commit.number == maxi else " ", ("%%%ds" % nl) % commit.number, strftime(commit.ctime), len(_add), len(_del), PLUSMINUS_SYMBOL if m.c.useUnicodeFont else "~", len(_mod), _txt, (commit.message ?? ""), "TAG" if (commit.message ?? "") in m.tags else "")) if changes_: m.listChanges <| ChangeSet({a: None for a in _add}, {d: None for d in _del}, {m: None for m in _mod}, {}) # TODO moves detection? if diff_: pass # _diff(m, changes) # needs from revision diff olds = news # replaces olds for next revision compare @@ -971,12 +986,12 @@ def dump(argument:str, options:str[] = []): def publish(message:str?, cmd:str, options:str[] = [], onlys:FrozenSet[str]? = None, excps:FrozenSet[str]? = None): ''' Write changes made to the branch into one commit of the underlying VCS without further checks. ''' - m:Metadata = Metadata() + m:Metadata = Metadata() # TODO SOS only commit whats different from VCS state? if not (m.track or m.picky): Exit("Not implemented for simple repository mode yet") # TODO add manual file picking mode instead (add by extension, recursive, ... see issue for details) m, branch, revision, changed, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(None, options, onlys = onlys, excps = excps) maxi:int? = m.getHighestRevision(branch) if maxi is None: Exit("No revision to publish on current branch (or any of its parents after fast-branching)") - m.computeSequentialPathSet(branch, maxi) # load all commits up to specified revision + m.computeSequentialPathSet(branch, maxi, startwith = 1 if maxi >= 1 and not '--all' in options and not (m.track or m.picky) else 0) # load all commits up to specified revision # HINT logic to only add changed files vs. originating file state - would require in-depth underlying VCS knowledge. We currentöy assume commit 0 as base # TODO discuss: only commit changes from r1.. onward vs. 
r0?, or attempt to add everything in repo, even if unchanged? the problem is that for different branches we might need to switch also underlying branches import subprocess # only required in this section @@ -1167,10 +1182,12 @@ def main(): # Main part -force_sos:bool = '--sos' in sys.argv -force_vcs:bool = '--vcs' in sys.argv -verbose:bool = '--verbose' in sys.argv or '-v' in sys.argv # imported from utility, and only modified here -debug_:bool = os.environ.get("DEBUG", "False").lower() == "true" or '--debug' in sys.argv +force_sos:bool = [None] if '--sos' in sys.argv else [] # this is a trick allowing to modify the flags from the test suite +force_vcs:bool = [None] if '--vcs' in sys.argv else [] +verbose:bool = [None] if '--verbose' in sys.argv or '-v' in sys.argv else [] # imported from utility, and only modified here +debug_:bool = [None] if os.environ.get("DEBUG", "False").lower() == "true" or '--debug' in sys.argv else [] level:int = logging.DEBUG if '--debug' in sys.argv else logging.INFO + _log = Logger(logging.getLogger(__name__)); debug, info, warn, error = _log.debug, _log.info, _log.warn, _log.error + if __name__ == '__main__': main() diff --git a/sos/sos.py b/sos/sos.py index c3e2ff9..9f61a1a 100644 --- a/sos/sos.py +++ b/sos/sos.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# __coconut_hash__ = 0x89ff4c26 +# __coconut_hash__ = 0x8a4aeb41 # Compiled with Coconut version 1.3.1-post_dev28 [Dead Parrot] @@ -47,8 +47,8 @@ import configr # line 21 -# Lazy imports for quicker initialization -shutil = None # type: Any # line 25 +# Lazy module auto-import for quick initialization +shutil = None # type: _coconut.typing.Optional[Any] # line 25 class shutil: # line 26 @_coconut_tco # line 26 def __getattribute__(_, key): # line 26 @@ -216,7 +216,7 @@ def duplicateBranch(_, branch: 'int', name: '_coconut.typing.Optional[str]'=None info("Duplicating branch '%s' to '%s'..." 
% ((lambda _coconut_none_coalesce_item: ("b%d" % _.branch) if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(_.branches[_.branch].name), (("b%d" % branch if name is None else name)))) # line 170 now = int(time.time() * 1000) # type: int # line 171 _.loadBranch(_.branch) # load commits for current (originating) branch # line 172 - revision = max(_.commits) # type: int # line 173 + revision = max(_.commits) if _.commits else 0 # type: int # line 173 _.commits.clear() # line 174 newBranch = dataCopy(BranchInfo, _.branches[_.branch], number=branch, ctime=now, name=("Branched from '%s'" % ((lambda _coconut_none_coalesce_item: "b%d" % _.branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(_.branches[_.branch].name)) if name is None else name), tracked=[t for t in _.branches[_.branch].tracked], untracked=[u for u in _.branches[_.branch].untracked], parent=None if full else _.branch, revision=None if full else revision) # type: BranchInfo # line 175 os.makedirs(encode(revisionFolder(branch, 0, base=_.root) if full else branchFolder(branch, base=_.root))) # line 180 @@ -253,7 +253,7 @@ def createBranch(_, branch: 'int', name: '_coconut.typing.Optional[str]'=None, i os.makedirs(encode(revisionFolder(branch, 0, base=_.root))) # line 208 if _.branch is not None: # not immediately after "offline" - copy files from current branch # line 209 _.loadBranch(_.branch) # line 210 - revision = max(_.commits) # type: int # TODO what if last switch was to an earlier revision? no persisting of last checkout # line 211 + revision = max(_.commits) if _.commits else 0 # type: int # TODO what if last switch was to an earlier revision? 
no persisting of last checkout # line 211 _.computeSequentialPathSet(_.branch, revision) # full set of files in revision to _.paths # line 212 for path, pinfo in _.paths.items(): # line 213 _.copyVersionedFile(_.branch, revision, branch, 0, pinfo) # line 214 @@ -263,52 +263,61 @@ def createBranch(_, branch: 'int', name: '_coconut.typing.Optional[str]'=None, i _.branches[branch] = BranchInfo(branch, _.commits[0].ctime, name, True if len(_.branches) == 0 else _.branches[_.branch].inSync, tracked, untracked) # save branch info, in case it is needed # line 218 def removeBranch(_, branch: 'int') -> 'BranchInfo': # line 220 - ''' Entirely remove a branch and all its revisions from the file system. ''' # line 221 - binfo = None # type: BranchInfo # line 222 - deps = [(binfo.number, binfo.revision) for binfo in _.branches.values() if binfo.parent is not None and _.getParentBranch(binfo.number, 0) == branch] # type: List[Tuple[int, int]] # get transitively depending branches # line 223 - if deps: # need to copy all parent revisions to dependet branches first # line 224 - minrev = min([e[1] for e in deps]) # type: int # minimum revision ever branched from parent (ignoring transitive branching!) 
# line 225 - progress = ProgressIndicator(PROGRESS_MARKER[1 if _.c.useUnicodeFont else 0]) # type: ProgressIndicator # line 226 - for rev in range(0, minrev + 1): # rely on caching by copying revision-wise as long as needed in all depending branches # line 227 - for dep, _rev in deps: # line 228 - if rev <= _rev: # line 228 - printo("\rIntegrating revision %02d into dependant branch %02d %s" % (rev, dep, progress.getIndicator())) # line 229 - shutil.copytree(encode(revisionFolder(branch, rev, base=_.root)), encode(revisionFolder(dep, rev, base=_.root))) # folder would not exist yet # line 230 - for dep, _rev in deps: # copy remaining revisions per branch # line 231 - for rev in range(minrev + 1, _rev + 1): # line 232 - printo("\rIntegrating revision %02d into dependant branch %02d %s" % (rev, dep, progress.getIndicator())) # line 233 - shutil.copytree(encode(revisionFolder(_.getParentBranch(dep, rev), rev, base=_.root)), encode(revisionFolder(dep, rev, base=_.root))) # line 234 - _.branches[dep] = dataCopy(BranchInfo, _.branches[dep], parent=None, revision=None) # remove reference information # line 235 - printo(pure.ljust() + "\r") # line 236 - tryOrIgnore(lambda: shutil.rmtree(encode(branchFolder(branch) + BACKUP_SUFFIX))) # remove previous backup first # line 237 - try: # line 238 - os.rename(encode(branchFolder(branch)), encode(branchFolder(branch) + BACKUP_SUFFIX)) # line 238 - except: # line 239 - Exit("Cannot rename branch metadata to prepare removal. 
Are there locked or open files?") # line 239 - binfo = _.branches[branch] # keep reference for caller # line 240 - del _.branches[branch] # line 241 - _.branch = max(_.branches) # switch to another valid branch # line 242 - _.saveBranches() # line 243 - _.commits.clear() # line 244 - return binfo # line 245 - - def loadCommit(_, branch: 'int', revision: 'int'): # line 247 - ''' Load all file information from a commit meta data; if branched from another branch before specified revision, load correct revision recursively. ''' # line 248 - _branch = _.getParentBranch(branch, revision) # type: int # line 249 - with codecs.open(encode(revisionFolder(_branch, revision, base=_.root, file=metaFile)), "r", encoding=UTF8) as fd: # line 250 - _.paths = json.load(fd) # line 250 - _.paths = {path: PathInfo(*item) for path, item in _.paths.items()} # re-create type info # line 251 - _.branch = branch # store current branch information = "switch" to loaded branch/commit # line 252 - - def saveCommit(_, branch: 'int', revision: 'int'): # line 254 - ''' Save all file information to a commit meta data file. 
''' # line 255 - target = revisionFolder(branch, revision, base=_.root) # type: str # line 256 - tryOrIgnore(lambda _=None: os.makedirs(encode(target))) # line 257 - tryOrIgnore(lambda _=None: shutil.copy2(encode(os.path.join(target, metaFile)), encode(os.path.join(target, metaBack)))) # ignore error for first backup # line 258 - with codecs.open(encode(os.path.join(target, metaFile)), "w", encoding=UTF8) as fd: # line 259 - json.dump(_.paths, fd, ensure_ascii=False) # line 259 - - def findChanges(_, branch: '_coconut.typing.Optional[int]'=None, revision: '_coconut.typing.Optional[int]'=None, checkContent: 'bool'=False, inverse: 'bool'=False, considerOnly: '_coconut.typing.Optional[FrozenSet[str]]'=None, dontConsider: '_coconut.typing.Optional[FrozenSet[str]]'=None, progress: 'bool'=False) -> 'Tuple[ChangeSet, _coconut.typing.Optional[str]]': # line 261 + ''' Entirely remove a branch and all its revisions from the file system. + We currently implement a simplified logic that fully re-creates all revisions for all transitively depending branches instead of only removing the one parent branch. 
+ ''' # line 223 + import collections # used almost only here # line 224 + binfo = None # type: BranchInfo # typing info # line 225 + deps = [(binfo.number, binfo.revision) for binfo in _.branches.values() if binfo.parent is not None and branch in _.getParentBranches(binfo.number, 0)] # type: List[Tuple[int, int]] # all transitively depending branches # line 226 + newcommits = collections.defaultdict(dict) # type: Dict[int, Dict[int, CommitInfo]] # gathers commit info of re-created branches (branch -> revision -> info) # line 227 + if deps: # need to copy all parent revisions to dependent branches first # line 228 + minrev = min((e[1] for e in deps)) # type: int # minimum revision ever branched from parent: up to this revision we can simply copy them to all dependant branches # line 229 + progress = ProgressIndicator(PROGRESS_MARKER[1 if _.c.useUnicodeFont else 0]) # type: ProgressIndicator # line 230 + for rev in range(0, minrev + 1): # rely on caching by copying revision-wise as long as needed into all depending branches # line 231 + for dep, _rev in deps: # line 232 + printo("\rIntegrating revision %02d into dependant branch %02d %s" % (rev, dep, progress.getIndicator())) # TODO align placement of indicator with other uses of progress # line 233 + _.loadBranch(_.getParentBranch(branch, rev)) # load commits and set _.branch (in case branch to remove was also fast-branched) # line 234 +# if rev in _.commits: # TODO uncomment? - if not, it was an empty commit? because on non-commit branches there's no revision 0? 
+ newcommits[dep][rev] = _.commits[rev] # line 236 + shutil.copytree(encode(revisionFolder(_.branch, rev, base=_.root)), encode(revisionFolder(dep, rev, base=_.root))) # line 237 + for dep, _rev in deps: # copy remaining revisions by branch instead of by revision # line 238 + for rev in range(minrev + 1, _rev + 1): # line 239 + printo("\rIntegrating revision %02d into dependant branch %02d %s" % (rev, dep, progress.getIndicator())) # line 240 + _.loadBranch(_.getParentBranch(dep, rev)) # WARN using dep instead of branch here! # line 241 + if rev in _.commits: # false only if no added or modified files during fast-branch? # line 242 + newcommits[dep][rev] = _.commits[rev] # line 243 + shutil.copytree(encode(revisionFolder(_.branch, rev, base=_.root)), encode(revisionFolder(dep, rev, base=_.root))) # line 244 + _.branches[dep] = dataCopy(BranchInfo, _.branches[dep], parent=None, revision=None) # delete fast-branching reference information # line 245 + printo(pure.ljust() + "\r") # clean line output # line 246 + tryOrIgnore(lambda: shutil.rmtree(encode(branchFolder(branch) + BACKUP_SUFFIX))) # remove previous backup first # line 247 + tryOrIgnore(lambda: os.rename(encode(branchFolder(branch)), encode(branchFolder(branch) + BACKUP_SUFFIX)), lambda E: Exit("Cannot rename branch metadata to prepare removal. 
Are there locked or open files?")) # line 248 + binfo = _.branches[branch] # keep reference to removed branch info for caller # line 249 + del _.branches[branch] # line 250 + _.branch = (branch + 1) if (branch + 1) in _.branches else max(_.branches) # switch to another valid branch # line 251 + _.saveBranches() # persist modified branches list # line 252 + for branch, commits in newcommits.items(): # now store aggregated commit infos # line 253 + _.commits = commits # line 254 + _.saveBranch(branch) # line 255 + _.commits.clear() # clean memory # line 256 + return binfo # line 257 + + def loadCommit(_, branch: 'int', revision: 'int'): # line 259 + ''' Load all file information from a commit meta data; if branched from another branch before specified revision, load correct revision recursively. ''' # line 260 + _branch = _.getParentBranch(branch, revision) # type: int # line 261 + with codecs.open(encode(revisionFolder(_branch, revision, base=_.root, file=metaFile)), "r", encoding=UTF8) as fd: # line 262 + _.paths = json.load(fd) # line 262 + _.paths = {path: PathInfo(*item) for path, item in _.paths.items()} # re-create type info # line 263 + _.branch = branch # store current branch information = "switch" to loaded branch/commit # line 264 + + def saveCommit(_, branch: 'int', revision: 'int'): # line 266 + ''' Save all file information to a commit meta data file. 
''' # line 267 + target = revisionFolder(branch, revision, base=_.root) # type: str # line 268 + tryOrIgnore(lambda _=None: os.makedirs(encode(target))) # line 269 + tryOrIgnore(lambda _=None: shutil.copy2(encode(os.path.join(target, metaFile)), encode(os.path.join(target, metaBack)))) # ignore error for first backup # line 270 + with codecs.open(encode(os.path.join(target, metaFile)), "w", encoding=UTF8) as fd: # line 271 + json.dump(_.paths, fd, ensure_ascii=False) # line 271 + + def findChanges(_, branch: '_coconut.typing.Optional[int]'=None, revision: '_coconut.typing.Optional[int]'=None, checkContent: 'bool'=False, inverse: 'bool'=False, considerOnly: '_coconut.typing.Optional[FrozenSet[str]]'=None, dontConsider: '_coconut.typing.Optional[FrozenSet[str]]'=None, progress: 'bool'=False) -> 'Tuple[ChangeSet, _coconut.typing.Optional[str]]': # line 273 ''' Find changes on the file system vs. in-memory paths (which should reflect the latest commit state). Only if both branch and revision are *not* None, write modified/added files to the specified revision folder (thus creating a new revision) checkContent: also computes file content hashes @@ -317,1142 +326,1153 @@ def findChanges(_, branch: '_coconut.typing.Optional[int]'=None, revision: '_coc dontConsider: set of tracking patterns to not consider in changes (always overrides considerOnly) progress: Show file names during processing returns: (ChangeSet = the state of file tree *differences*, unless "inverse" is True -> then return original data, message) - ''' # line 270 - import collections # used only in this method # line 271 - write = branch is not None and revision is not None # line 272 - if write: # line 273 - try: # line 274 - os.makedirs(encode(revisionFolder(branch, revision, base=_.root))) # line 274 - except FileExistsError: # HINT "try" only necessary for *testing* hash collision code (!) TODO probably raise exception otherwise in any case? 
# line 275 - pass # HINT "try" only necessary for *testing* hash collision code (!) TODO probably raise exception otherwise in any case? # line 275 - changed = ChangeSet({}, {}, {}, {}) # type: ChangeSet # TODO Needs explicity initialization due to mypy problems with default arguments :-( # line 276 - indicator = ProgressIndicator(PROGRESS_MARKER[1 if _.c.useUnicodeFont else 0]) if progress else None # type: _coconut.typing.Optional[ProgressIndicator] # optional file list progress indicator # line 277 - hashed = None # type: _coconut.typing.Optional[str] # line 278 - written = None # type: int # line 278 - compressed = 0 # type: int # line 278 - original = 0 # type: int # line 278 - start_time = time.time() # type: float # line 278 - knownPaths = collections.defaultdict(list) # type: Dict[str, List[str]] # line 279 - for path, pinfo in _.paths.items(): # line 280 - if pinfo.size is not None and (considerOnly is None or any((path[:path.rindex(SLASH)] == pattern[:pattern.rindex(SLASH)] and fnmatch.fnmatch(path[path.rindex(SLASH) + 1:], pattern[pattern.rindex(SLASH) + 1:]) for pattern in considerOnly))) and (dontConsider is None or not any((path[:path.rindex(SLASH)] == pattern[:pattern.rindex(SLASH)] and fnmatch.fnmatch(path[path.rindex(SLASH) + 1:], pattern[pattern.rindex(SLASH) + 1:]) for pattern in dontConsider))): # line 281 - knownPaths[os.path.dirname(path)].append(os.path.basename(path)) # TODO reimplement using fnmatch.filter and set operations for all files per path for speed # line 284 - for path, dirnames, filenames in os.walk(_.root): # line 285 - path = decode(path) # line 286 - dirnames[:] = [decode(d) for d in dirnames] # line 287 - filenames[:] = [decode(f) for f in filenames] # line 288 - dirnames[:] = [d for d in dirnames if len([n for n in _.c.ignoreDirs if fnmatch.fnmatch(d, n)]) == 0 or len([p for p in _.c.ignoreDirsWhitelist if fnmatch.fnmatch(d, p)]) > 0] # global ignores # line 289 - filenames[:] = [f for f in filenames if len([n for n in 
_.c.ignores if fnmatch.fnmatch(f, n)]) == 0 or len([p for p in _.c.ignoresWhitelist if fnmatch.fnmatch(f, p)]) > 0] # line 290 - dirnames.sort() # line 291 - filenames.sort() # line 291 - relPath = os.path.relpath(path, _.root).replace(os.sep, SLASH) # type: str # line 292 - walk = list(filenames if considerOnly is None else reduce(lambda last, pattern: last | set(fnmatch.filter(filenames, os.path.basename(pattern))), (p for p in considerOnly if os.path.dirname(p).replace(os.sep, SLASH) == relPath), _coconut.set())) # type: List[str] # line 293 - if dontConsider: # line 294 - walk[:] = [fn for fn in walk if not any((fnmatch.fnmatch(fn, os.path.basename(p)) for p in dontConsider if os.path.dirname(p).replace(os.sep, SLASH) == relPath))] # line 295 - for file in walk: # if m.track or m.picky: only files that match any path-relevant tracking patterns # line 296 - filename = relPath + SLASH + file # line 297 - filepath = os.path.join(path, file) # line 298 - try: # line 299 - stat = os.stat(encode(filepath)) # line 299 - except Exception as E: # line 300 - exception(E) # line 300 - continue # line 300 - size, mtime = stat.st_size, int(stat.st_mtime * 1000) # line 301 - show = indicator.getIndicator() if progress else None # type: _coconut.typing.Optional[str] # line 302 - if show: # indication character returned # line 303 - outstring = "\r%s %s %s" % ("Preparing" if write else "Checking", show, filename) # line 304 - printo(pure.ljust(outstring), nl="") # line 305 - progressSymbols = PROGRESS_MARKER[1 if _.c.useUnicodeFont else 0] # type: str # line 306 - if filename not in _.paths: # detected file not present (or untracked) in (other) branch # line 307 - nameHash = hashStr(filename) # line 308 - try: # line 309 - hashed, written = hashFile(filepath, _.compress, symbols=progressSymbols, saveTo=revisionFolder(branch, revision, base=_.root, file=nameHash) if write else None, callback=(lambda sign: printo(pure.ljust(outstring + " " + sign), nl="")) if show else None) if 
size > 0 else (None, 0) # line 310 - changed.additions[filename] = PathInfo(nameHash, size, mtime, hashed) # line 311 - compressed += written # line 312 - original += size # line 312 - except Exception as E: # line 313 - exception(E) # line 313 - continue # with next file # line 314 - last = _.paths[filename] # filename is known - check for modifications # line 315 - if last.size is None: # was removed before but is now added back - does not apply for tracking mode (which never marks files for removal in the history) # line 316 - try: # line 317 - hashed, written = hashFile(filepath, _.compress, symbols=progressSymbols, saveTo=revisionFolder(branch, revision, base=_.root, file=last.nameHash) if write else None, callback=None if not progress else lambda sign: printo(pure.ljust(outstring + " " + sign), nl="")) if size > 0 else (None, 0) # line 318 - changed.additions[filename] = PathInfo(last.nameHash, size, mtime, hashed) # line 319 - continue # line 319 - except Exception as E: # line 320 - exception(E) # line 320 - elif size != last.size or (not checkContent and mtime != last.mtime) or (checkContent and tryOrDefault(lambda: (hashFile(filepath, _.compress, symbols=progressSymbols)[0] != last.hash), default=False)): # detected a modification TODO wrap hashFile exception # line 321 - try: # line 322 - hashed, written = hashFile(filepath, _.compress, symbols=progressSymbols, saveTo=revisionFolder(branch, revision, base=_.root, file=last.nameHash) if write else None, callback=None if not progress else lambda sign: printo(pure.ljust(outstring + " " + sign), nl="")) if (last.size if inverse else size) > 0 else (last.hash if inverse else None, 0) # line 323 - changed.modifications[filename] = PathInfo(last.nameHash, last.size if inverse else size, last.mtime if inverse else mtime, hashed) # line 324 + ''' # line 282 + import collections # used only in this method # line 283 + write = branch is not None and revision is not None # line 284 + if write: # line 285 + try: # 
line 286 + os.makedirs(encode(revisionFolder(branch, revision, base=_.root))) # line 286 + except FileExistsError: # HINT "try" only necessary for *testing* hash collision code (!) TODO probably raise exception otherwise in any case? # line 287 + pass # HINT "try" only necessary for *testing* hash collision code (!) TODO probably raise exception otherwise in any case? # line 287 + changed = ChangeSet({}, {}, {}, {}) # type: ChangeSet # TODO Needs explicity initialization due to mypy problems with default arguments :-( # line 288 + indicator = ProgressIndicator(PROGRESS_MARKER[1 if _.c.useUnicodeFont else 0]) if progress else None # type: _coconut.typing.Optional[ProgressIndicator] # optional file list progress indicator # line 289 + hashed = None # type: _coconut.typing.Optional[str] # line 290 + written = None # type: int # line 290 + compressed = 0 # type: int # line 290 + original = 0 # type: int # line 290 + start_time = time.time() # type: float # line 290 + knownPaths = collections.defaultdict(list) # type: Dict[str, List[str]] # line 291 + for path, pinfo in _.paths.items(): # line 292 + if pinfo.size is not None and (considerOnly is None or any((path[:path.rindex(SLASH)] == pattern[:pattern.rindex(SLASH)] and fnmatch.fnmatch(path[path.rindex(SLASH) + 1:], pattern[pattern.rindex(SLASH) + 1:]) for pattern in considerOnly))) and (dontConsider is None or not any((path[:path.rindex(SLASH)] == pattern[:pattern.rindex(SLASH)] and fnmatch.fnmatch(path[path.rindex(SLASH) + 1:], pattern[pattern.rindex(SLASH) + 1:]) for pattern in dontConsider))): # line 293 + knownPaths[os.path.dirname(path)].append(os.path.basename(path)) # TODO reimplement using fnmatch.filter and set operations for all files per path for speed # line 296 + for path, dirnames, filenames in os.walk(_.root): # line 297 + path = decode(path) # line 298 + dirnames[:] = [decode(d) for d in dirnames] # line 299 + filenames[:] = [decode(f) for f in filenames] # line 300 + dirnames[:] = [d for d in 
dirnames if len([n for n in _.c.ignoreDirs if fnmatch.fnmatch(d, n)]) == 0 or len([p for p in _.c.ignoreDirsWhitelist if fnmatch.fnmatch(d, p)]) > 0] # global ignores # line 301 + filenames[:] = [f for f in filenames if len([n for n in _.c.ignores if fnmatch.fnmatch(f, n)]) == 0 or len([p for p in _.c.ignoresWhitelist if fnmatch.fnmatch(f, p)]) > 0] # line 302 + dirnames.sort() # line 303 + filenames.sort() # line 303 + relPath = os.path.relpath(path, _.root).replace(os.sep, SLASH) # type: str # line 304 + walk = list(filenames if considerOnly is None else reduce(lambda last, pattern: last | set(fnmatch.filter(filenames, os.path.basename(pattern))), (p for p in considerOnly if os.path.dirname(p).replace(os.sep, SLASH) == relPath), _coconut.set())) # type: List[str] # line 305 + if dontConsider: # line 306 + walk[:] = [fn for fn in walk if not any((fnmatch.fnmatch(fn, os.path.basename(p)) for p in dontConsider if os.path.dirname(p).replace(os.sep, SLASH) == relPath))] # line 307 + for file in walk: # if m.track or m.picky: only files that match any path-relevant tracking patterns # line 308 + filename = relPath + SLASH + file # line 309 + filepath = os.path.join(path, file) # line 310 + try: # line 311 + stat = os.stat(encode(filepath)) # line 311 + except Exception as E: # line 312 + exception(E) # line 312 + continue # line 312 + size, mtime = stat.st_size, int(stat.st_mtime * 1000) # line 313 + show = indicator.getIndicator() if progress else None # type: _coconut.typing.Optional[str] # line 314 + if show: # indication character returned # line 315 + outstring = "\r%s %s %s" % ("Preparing" if write else "Checking", show, filename) # line 316 + printo(pure.ljust(outstring), nl="") # line 317 + progressSymbols = PROGRESS_MARKER[1 if _.c.useUnicodeFont else 0] # type: str # line 318 + if filename not in _.paths: # detected file not present (or untracked) in (other) branch # line 319 + nameHash = hashStr(filename) # line 320 + try: # line 321 + hashed, written = 
hashFile(filepath, _.compress, symbols=progressSymbols, saveTo=revisionFolder(branch, revision, base=_.root, file=nameHash) if write else None, callback=(lambda sign: printo(pure.ljust(outstring + " " + sign), nl="")) if show else None) if size > 0 else (None, 0) # line 322 + changed.additions[filename] = PathInfo(nameHash, size, mtime, hashed) # line 323 + compressed += written # line 324 + original += size # line 324 except Exception as E: # line 325 exception(E) # line 325 - else: # with next file # line 326 continue # with next file # line 326 - compressed += written # line 327 - original += last.size if inverse else size # line 327 - if relPath in knownPaths: # at least one file is tracked TODO may leave empty lists in dict # line 328 - knownPaths[relPath][:] = list(set(knownPaths[relPath]) - set(walk)) # at least one file is tracked TODO may leave empty lists in dict # line 328 - for path, names in knownPaths.items(): # all paths that weren't walked by # line 329 - for file in names: # line 330 - if len([n for n in _.c.ignores if fnmatch.fnmatch(file, n)]) > 0 and len([p for p in _.c.ignoresWhitelist if fnmatch.fnmatch(file, p)]) == 0: # don't mark ignored files as deleted # line 331 - continue # don't mark ignored files as deleted # line 331 - pth = path + SLASH + file # type: str # line 332 - changed.deletions[pth] = _.paths[pth] # line 333 - changed = dataCopy(ChangeSet, changed, moves=detectMoves(changed)) # line 334 - if progress: # forces clean line of progress output # line 335 - printo("\r" + pure.ljust() + "\r", nl="") # forces clean line of progress output # line 335 - elif verbose: # line 336 - info("Finished detecting changes") # line 336 - tt = time.time() - start_time # type: float # line 337 - speed = (original / (KIBI * tt)) if tt > 0. else 0. # type: float # line 337 - msg = (("Compression advantage is %.1f%%" % (original * 100. 
/ compressed - 100.)) if _.compress and write and compressed > 0 else "") # type: str # line 338 - msg = (msg + " | " if msg else "") + ("Transfer speed was %.2f %siB/s." % (speed if speed < 1500. else speed / KIBI, "k" if speed < 1500. else "M") if original > 0 and tt > 0. else "") # line 339 - return (changed, msg if msg else None) # line 340 - - def computeSequentialPathSet(_, branch: 'int', revision: 'int'): # line 342 - ''' Returns nothing, just updates _.paths in place. ''' # line 343 - next(_.computeSequentialPathSetIterator(branch, revision, incrementally=False)) # simply invoke the generator once to get full results # line 344 - - def computeSequentialPathSetIterator(_, branch: 'int', revision: 'int', incrementally: 'bool'=True) -> '_coconut.typing.Optional[Iterator[Dict[str, PathInfo]]]': # line 346 - ''' In-memory computation of current list of valid PathInfo entries for specified branch and through specified revision. ''' # line 347 - _.loadCommit(branch, 0) # load initial paths # line 348 - if incrementally: # line 349 - yield _.paths # line 349 - m = Metadata(_.root) # type: Metadata # next changes TODO avoid loading all metadata and config # line 350 - rev = None # type: int # next changes TODO avoid loading all metadata and config # line 350 - for rev in range(1, revision + 1): # line 351 - m.loadCommit(_.getParentBranch(branch, rev), rev) # line 352 - for p, info in m.paths.items(): # line 353 - if info.size == None: # line 354 - del _.paths[p] # line 354 - else: # line 355 - _.paths[p] = info # line 355 - if incrementally: # line 356 - yield _.paths # line 356 - yield None # for the default case - not incrementally # line 357 - - def getTrackingPatterns(_, branch: '_coconut.typing.Optional[int]'=None, negative: 'bool'=False) -> 'FrozenSet[str]': # line 359 - ''' Returns list of tracking patterns (or untracking patterns if negative) for provided branch or current branch. 
''' # line 360 - return _coconut.frozenset() if not (_.track or _.picky) else frozenset(_.branches[(_.branch if branch is None else branch)].untracked if negative else _.branches[(_.branch if branch is None else branch)].tracked) # line 361 - - def parseRevisionString(_, argument: 'str') -> 'Tuple[_coconut.typing.Optional[int], _coconut.typing.Optional[int]]': # line 363 + last = _.paths[filename] # filename is known - check for modifications # line 327 + if last.size is None: # was removed before but is now added back - does not apply for tracking mode (which never marks files for removal in the history) # line 328 + try: # line 329 + hashed, written = hashFile(filepath, _.compress, symbols=progressSymbols, saveTo=revisionFolder(branch, revision, base=_.root, file=last.nameHash) if write else None, callback=None if not progress else lambda sign: printo(pure.ljust(outstring + " " + sign), nl="")) if size > 0 else (None, 0) # line 330 + changed.additions[filename] = PathInfo(last.nameHash, size, mtime, hashed) # line 331 + continue # line 331 + except Exception as E: # line 332 + exception(E) # line 332 + elif size != last.size or (not checkContent and mtime != last.mtime) or (checkContent and tryOrDefault(lambda: (hashFile(filepath, _.compress, symbols=progressSymbols)[0] != last.hash), default=False)): # detected a modification TODO wrap hashFile exception # line 333 + try: # line 334 + hashed, written = hashFile(filepath, _.compress, symbols=progressSymbols, saveTo=revisionFolder(branch, revision, base=_.root, file=last.nameHash) if write else None, callback=None if not progress else lambda sign: printo(pure.ljust(outstring + " " + sign), nl="")) if (last.size if inverse else size) > 0 else (last.hash if inverse else None, 0) # line 335 + changed.modifications[filename] = PathInfo(last.nameHash, last.size if inverse else size, last.mtime if inverse else mtime, hashed) # line 336 + except Exception as E: # line 337 + exception(E) # line 337 + else: # with next 
file # line 338 + continue # with next file # line 338 + compressed += written # line 339 + original += last.size if inverse else size # line 339 + if relPath in knownPaths: # at least one file is tracked TODO may leave empty lists in dict # line 340 + knownPaths[relPath][:] = list(set(knownPaths[relPath]) - set(walk)) # at least one file is tracked TODO may leave empty lists in dict # line 340 + for path, names in knownPaths.items(): # all paths that weren't walked by # line 341 + for file in names: # line 342 + if len([n for n in _.c.ignores if fnmatch.fnmatch(file, n)]) > 0 and len([p for p in _.c.ignoresWhitelist if fnmatch.fnmatch(file, p)]) == 0: # don't mark ignored files as deleted # line 343 + continue # don't mark ignored files as deleted # line 343 + pth = path + SLASH + file # type: str # line 344 + changed.deletions[pth] = _.paths[pth] # line 345 + changed = dataCopy(ChangeSet, changed, moves=detectMoves(changed)) # line 346 + if progress: # forces clean line of progress output # line 347 + printo("\r" + pure.ljust() + "\r", nl="") # forces clean line of progress output # line 347 + elif verbose: # line 348 + info("Finished detecting changes") # line 348 + tt = time.time() - start_time # type: float # line 349 + speed = (original / (KIBI * tt)) if tt > 0. else 0. # type: float # line 349 + msg = (("Compression advantage is %.1f%%" % (original * 100. / compressed - 100.)) if _.compress and write and compressed > 0 else "") # type: str # line 350 + msg = (msg + " | " if msg else "") + ("Transfer speed was %.2f %siB/s." % (speed if speed < 1500. else speed / KIBI, "k" if speed < 1500. else "M") if original > 0 and tt > 0. else "") # line 351 + return (changed, msg if msg else None) # line 352 + + def computeSequentialPathSet(_, branch: 'int', revision: 'int'): # line 354 + ''' Returns nothing, just updates _.paths in place. 
''' # line 355 + next(_.computeSequentialPathSetIterator(branch, revision, incrementally=False)) # simply invoke the generator once to get full results # line 356 + + def computeSequentialPathSetIterator(_, branch: 'int', revision: 'int', incrementally: 'bool'=True, startwith: 'int'=0) -> '_coconut.typing.Optional[Iterator[Dict[str, PathInfo]]]': # line 358 + ''' In-memory computation of current list of valid PathInfo entries for specified branch and through specified revision. ''' # line 359 + try: # load initial paths # line 360 + _.loadCommit(branch, startwith) # load initial paths # line 360 + except: # no revisions # line 361 + yield {} # no revisions # line 361 + return None # no revisions # line 361 + if incrementally: # line 362 + yield _.paths # line 362 + m = Metadata(_.root) # type: Metadata # next changes TODO avoid loading all metadata and config # line 363 + rev = None # type: int # next changes TODO avoid loading all metadata and config # line 363 + for rev in range(startwith + 1, revision + 1): # line 364 + m.loadCommit(branch, rev) # line 365 + for p, info in m.paths.items(): # line 366 + if info.size == None: # line 367 + del _.paths[p] # line 367 + else: # line 368 + _.paths[p] = info # line 368 + if incrementally: # line 369 + yield _.paths # line 369 + yield None # for the default case - not incrementally # line 370 + + def getTrackingPatterns(_, branch: '_coconut.typing.Optional[int]'=None, negative: 'bool'=False) -> 'FrozenSet[str]': # line 372 + ''' Returns list of tracking patterns (or untracking patterns if negative) for provided branch or current branch. 
''' # line 373 + return _coconut.frozenset() if not (_.track or _.picky) else frozenset(_.branches[(_.branch if branch is None else branch)].untracked if negative else _.branches[(_.branch if branch is None else branch)].tracked) # line 374 + + def parseRevisionString(_, argument: 'str') -> 'Tuple[_coconut.typing.Optional[int], _coconut.typing.Optional[int]]': # line 376 ''' Commit identifiers can be str or int for branch, and int for revision. Revision identifiers can be negative, with -1 being last commit. - ''' # line 366 - if argument is None or argument == SLASH: # no branch/revision specified # line 367 - return (_.branch, -1) # no branch/revision specified # line 367 - argument = argument.strip() # line 368 - if argument.startswith(SLASH): # current branch # line 369 - return (_.branch, _.getRevisionByName(argument[1:])) # current branch # line 369 - if argument.endswith(SLASH): # line 370 - try: # line 371 - return (_.getBranchByName(argument[:-1]), -1) # line 371 - except ValueError: # line 372 - Exit("Unknown branch label '%s'" % argument) # line 372 - if SLASH in argument: # line 373 - b, r = argument.split(SLASH)[:2] # line 374 - try: # line 375 - return (_.getBranchByName(b), _.getRevisionByName(r)) # line 375 - except ValueError: # line 376 - Exit("Unknown branch label or wrong number format '%s/%s'" % (b, r)) # line 376 - branch = _.getBranchByName(argument) # type: int # returns number if given (revision) integer # line 377 - if branch not in _.branches: # line 378 - branch = None # line 378 - try: # either branch name/number or reverse/absolute revision number # line 379 - return ((_.branch if branch is None else branch), int(argument if argument else "-1") if branch is None else -1) # either branch name/number or reverse/absolute revision number # line 379 - except: # line 380 - Exit("Unknown branch label or wrong number format") # line 380 - Exit("This should never happen. 
Please create a issue report") # line 381 - return (None, None) # line 381 - - def findRevision(_, branch: 'int', revision: 'int', nameHash: 'str') -> 'Tuple[int, str]': # line 383 + ''' # line 379 + if argument is None or argument == SLASH: # no branch/revision specified # line 380 + return (_.branch, -1) # no branch/revision specified # line 380 + argument = argument.strip() # line 381 + if argument.startswith(SLASH): # current branch # line 382 + return (_.branch, _.getRevisionByName(argument[1:])) # current branch # line 382 + if argument.endswith(SLASH): # line 383 + try: # line 384 + return (_.getBranchByName(argument[:-1]), -1) # line 384 + except ValueError: # line 385 + Exit("Unknown branch label '%s'" % argument) # line 385 + if SLASH in argument: # line 386 + b, r = argument.split(SLASH)[:2] # line 387 + try: # line 388 + return (_.getBranchByName(b), _.getRevisionByName(r)) # line 388 + except ValueError: # line 389 + Exit("Unknown branch label or wrong number format '%s/%s'" % (b, r)) # line 389 + branch = _.getBranchByName(argument) # type: int # returns number if given (revision) integer # line 390 + if branch not in _.branches: # line 391 + branch = None # line 391 + try: # either branch name/number or reverse/absolute revision number # line 392 + return ((_.branch if branch is None else branch), int(argument if argument else "-1") if branch is None else -1) # either branch name/number or reverse/absolute revision number # line 392 + except: # line 393 + Exit("Unknown branch label or wrong number format") # line 393 + Exit("This should never happen. Please create a issue report") # line 394 + return (None, None) # line 394 + + def findRevision(_, branch: 'int', revision: 'int', nameHash: 'str') -> 'Tuple[int, str]': # line 396 ''' Find latest revision that contained the file physically, not returning the actual parent branch it is stored on. 
- Returns (highest revision <= specified revision containing the file, file path to file on (actual parent) branch).''' # line 385 - while True: # line 386 - _branch = _.getParentBranch(branch, revision) # type: int # line 387 - source = revisionFolder(_branch, revision, base=_.root, file=nameHash) # type: str # line 388 - if os.path.exists(encode(source)) and os.path.isfile(source): # line 389 - break # line 389 - revision -= 1 # line 390 - if revision < 0: # line 391 - Exit("Cannot determine versioned file '%s' from specified branch '%d'" % (nameHash, branch)) # line 391 - return revision, source # line 392 - - def getParentBranch(_, branch: 'int', revision: 'int') -> 'int': # line 394 - ''' Determine originating branch for a (potentially branched) revision, traversing all branch parents until found. ''' # line 395 - other = _.branches[branch].parent # type: _coconut.typing.Optional[int] # reference to originating parent branch, or None # line 396 - if other is None or revision > _.branches[branch].revision: # need to load commit from other branch instead # line 397 - return branch # need to load commit from other branch instead # line 397 - while _.branches[other].parent is not None and revision <= _.branches[other].revision: # line 398 - other = _.branches[other].parent # line 398 - return other # line 399 - - @_coconut_tco # line 401 - def getHighestRevision(_, branch: 'int') -> '_coconut.typing.Optional[int]': # line 401 - ''' Find highest revision of a branch, even if current branch has no commits. 
''' # line 402 - m = Metadata() # type: Metadata # line 403 - other = branch # type: _coconut.typing.Optional[int] # line 404 - while other is not None: # line 405 - m.loadBranch(other) # line 406 - if m.commits: # line 407 - return _coconut_tail_call(max, m.commits) # line 407 - other = _.branches[branch].parent # reference to originating parent branch, or None # line 408 - return None # line 409 - - def copyVersionedFile(_, branch: 'int', revision: 'int', toBranch: 'int', toRevision: 'int', pinfo: 'PathInfo'): # line 411 - ''' Copy versioned file to other branch/revision. ''' # line 412 - target = revisionFolder(toBranch, toRevision, base=_.root, file=pinfo.nameHash) # type: str # line 413 - revision, source = _.findRevision(branch, revision, pinfo.nameHash) # line 414 - shutil.copy2(encode(source), encode(target)) # line 415 - - def readOrCopyVersionedFile(_, branch: 'int', revision: 'int', nameHash: 'str', toFile: '_coconut.typing.Optional[str]'=None) -> '_coconut.typing.Optional[bytes]': # line 417 - ''' Return file contents, or copy contents into file path provided. 
''' # line 418 - source = _.findRevision(branch, revision, nameHash)[1] # type: str # revisionFolder(_.getParentBranch(branch, revision), _.findRevision(branch, revision, nameHash)[0], base = _.root, file = nameHash) # line 419 - try: # line 420 - with openIt(source, "r", _.compress) as fd: # line 420 - if toFile is None: # read bytes into memory and return # line 421 - return fd.read() # read bytes into memory and return # line 421 - with open(encode(toFile), "wb") as to: # line 422 - while True: # line 423 - buffer = fd.read(bufSize) # line 424 - to.write(buffer) # line 425 - if len(buffer) < bufSize: # line 426 - break # line 426 - return None # line 427 - except Exception as E: # line 428 - warn("Cannot read versioned file: %r (%d:%d:%s)" % (E, branch, revision, nameHash)) # line 428 - None # line 429 - - def restoreFile(_, relPath: '_coconut.typing.Optional[str]', branch: 'int', revision: 'int', pinfo: 'PathInfo', ensurePath: 'bool'=False) -> '_coconut.typing.Optional[bytes]': # line 431 - ''' Recreate file for given revision, or return binary contents if path is None. 
''' # line 432 - if relPath is None: # _.findRevision(branch, revision, pinfo.nameHash)[0], pinfo.nameHash) if pinfo.size > 0 else b'' # just return contents # line 433 - return _.readOrCopyVersionedFile(branch, revision, pinfo.nameHash) if pinfo.size > 0 else b'' # _.findRevision(branch, revision, pinfo.nameHash)[0], pinfo.nameHash) if pinfo.size > 0 else b'' # just return contents # line 433 - target = os.path.join(_.root, relPath.replace(SLASH, os.sep)) # type: str # line 434 - if ensurePath: # and not os.path.exists(encode(os.path.dirname(target))): # line 435 - tryOrIgnore(lambda _=None: os.makedirs(encode(os.path.dirname(target)))) # line 436 - if pinfo.size == 0: # line 437 - with open(encode(target), "wb"): # line 438 - pass # line 438 - try: # update access/modification timestamps on file system # line 439 - os.utime(encode(target), (pinfo.mtime / 1000., pinfo.mtime / 1000.)) # update access/modification timestamps on file system # line 439 - except Exception as E: # line 440 - error("Cannot update file's timestamp after restoration '%r'" % E) # line 440 - return None # line 441 - _revision, source = _.findRevision(branch, revision, pinfo.nameHash) # line 442 + Returns (highest revision <= specified revision containing the file, file path to file on (actual parent) branch).''' # line 398 + while True: # line 399 + _branch = _.getParentBranch(branch, revision) # type: int # line 400 + source = revisionFolder(_branch, revision, base=_.root, file=nameHash) # type: str # line 401 + if os.path.exists(encode(source)) and os.path.isfile(source): # line 402 + break # line 402 + revision -= 1 # line 403 + if revision < 0: # line 404 + Exit("Cannot determine versioned file '%s' from specified branch '%d'" % (nameHash, branch)) # line 404 + return revision, source # line 405 + + def getParentBranches(_, branch: 'int', revision: 'int') -> 'List[int]': # line 407 + ''' Determine originating branch for a (potentially branched) revision, traversing all branch parents 
until found. ''' # line 408 + others = [_.branches[branch].parent] # type: List[int] # reference to originating parent branch, or None # line 409 + if others[0] is None or revision > _.branches[branch].revision: # found. need to load commit from other branch instead # line 410 + return [branch] # found. need to load commit from other branch instead # line 410 + while _.branches[others[-1]].parent is not None and revision <= _.branches[others[-1]].revision: # find true original branch for revision # line 411 + others.append(_.branches[others[-1]].parent) # find true original branch for revision # line 411 + return others # line 412 + + def getParentBranch(_, branch: 'int', revision: 'int') -> 'int': # line 414 + return _.getParentBranches(branch, revision)[-1] # line 414 + + @_coconut_tco # line 416 + def getHighestRevision(_, branch: 'int') -> '_coconut.typing.Optional[int]': # line 416 + ''' Find highest revision of a branch, even if current branch has no commits. ''' # line 417 + m = Metadata() # type: Metadata # line 418 + other = branch # type: _coconut.typing.Optional[int] # line 419 + while other is not None: # line 420 + m.loadBranch(other) # line 421 + if m.commits: # line 422 + return _coconut_tail_call(max, m.commits) # line 422 + other = _.branches[branch].parent # reference to originating parent branch, or None # line 423 + return None # line 424 + + def copyVersionedFile(_, branch: 'int', revision: 'int', toBranch: 'int', toRevision: 'int', pinfo: 'PathInfo'): # line 426 + ''' Copy versioned file to other branch/revision. 
''' # line 427 + target = revisionFolder(toBranch, toRevision, base=_.root, file=pinfo.nameHash) # type: str # line 428 + revision, source = _.findRevision(branch, revision, pinfo.nameHash) # line 429 + shutil.copy2(encode(source), encode(target)) # line 430 + + def readOrCopyVersionedFile(_, branch: 'int', revision: 'int', nameHash: 'str', toFile: '_coconut.typing.Optional[str]'=None) -> '_coconut.typing.Optional[bytes]': # line 432 + ''' Return file contents, or copy contents into file path provided. ''' # line 433 + source = _.findRevision(branch, revision, nameHash)[1] # type: str # revisionFolder(_.getParentBranch(branch, revision), _.findRevision(branch, revision, nameHash)[0], base = _.root, file = nameHash) # line 434 + try: # line 435 + with openIt(source, "r", _.compress) as fd: # line 435 + if toFile is None: # read bytes into memory and return # line 436 + return fd.read() # read bytes into memory and return # line 436 + with open(encode(toFile), "wb") as to: # line 437 + while True: # line 438 + buffer = fd.read(bufSize) # line 439 + to.write(buffer) # line 440 + if len(buffer) < bufSize: # line 441 + break # line 441 + return None # line 442 + except Exception as E: # line 443 + warn("Cannot read versioned file: %r (%d:%d:%s)" % (E, branch, revision, nameHash)) # line 443 + None # line 444 + + def restoreFile(_, relPath: '_coconut.typing.Optional[str]', branch: 'int', revision: 'int', pinfo: 'PathInfo', ensurePath: 'bool'=False) -> '_coconut.typing.Optional[bytes]': # line 446 + ''' Recreate file for given revision, or return binary contents if path is None. 
''' # line 447 + if relPath is None: # _.findRevision(branch, revision, pinfo.nameHash)[0], pinfo.nameHash) if pinfo.size > 0 else b'' # just return contents # line 448 + return _.readOrCopyVersionedFile(branch, revision, pinfo.nameHash) if pinfo.size > 0 else b'' # _.findRevision(branch, revision, pinfo.nameHash)[0], pinfo.nameHash) if pinfo.size > 0 else b'' # just return contents # line 448 + target = os.path.join(_.root, relPath.replace(SLASH, os.sep)) # type: str # line 449 + if ensurePath: # and not os.path.exists(encode(os.path.dirname(target))): # line 450 + tryOrIgnore(lambda _=None: os.makedirs(encode(os.path.dirname(target)))) # line 451 + if pinfo.size == 0: # line 452 + with open(encode(target), "wb"): # line 453 + pass # line 453 + try: # update access/modification timestamps on file system # line 454 + os.utime(encode(target), (pinfo.mtime / 1000., pinfo.mtime / 1000.)) # update access/modification timestamps on file system # line 454 + except Exception as E: # line 455 + error("Cannot update file's timestamp after restoration '%r'" % E) # line 455 + return None # line 456 + _revision, source = _.findRevision(branch, revision, pinfo.nameHash) # line 457 # Restore file by copying buffer-wise - with openIt(source, "r", _.compress) as fd, open(encode(target), "wb") as to: # using Coconut's Enhanced Parenthetical Continuation # line 444 - while True: # line 445 - buffer = fd.read(bufSize) # line 446 - to.write(buffer) # line 447 - if len(buffer) < bufSize: # line 448 - break # line 448 - try: # update access/modification timestamps on file system # line 449 - os.utime(encode(target), (pinfo.mtime / 1000., pinfo.mtime / 1000.)) # update access/modification timestamps on file system # line 449 - except Exception as E: # line 450 - error("Cannot update file's timestamp after restoration '%r'" % E) # line 450 - return None # line 451 + with openIt(source, "r", _.compress) as fd, open(encode(target), "wb") as to: # using Coconut's Enhanced Parenthetical 
Continuation # line 459 + while True: # line 460 + buffer = fd.read(bufSize) # line 461 + to.write(buffer) # line 462 + if len(buffer) < bufSize: # line 463 + break # line 463 + try: # update access/modification timestamps on file system # line 464 + os.utime(encode(target), (pinfo.mtime / 1000., pinfo.mtime / 1000.)) # update access/modification timestamps on file system # line 464 + except Exception as E: # line 465 + error("Cannot update file's timestamp after restoration '%r'" % E) # line 465 + return None # line 466 # Main client operations -def offline(name: '_coconut.typing.Optional[str]'=None, initialMessage: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[]): # line 455 - ''' Initial command to start working offline. ''' # line 456 - if os.path.exists(encode(metaFolder)): # line 457 - if '--force' not in options: # line 458 - Exit("Repository folder is either already offline or older branches and commits were left over.\nUse 'sos online' to check for out-of-sync branches, or\nWipe existing offline branches with 'sos offline --force'") # line 458 - try: # line 459 - for entry in os.listdir(metaFolder): # line 460 - resource = metaFolder + os.sep + entry # line 461 - if os.path.isdir(resource): # line 462 - shutil.rmtree(encode(resource)) # line 462 - else: # line 463 - os.unlink(encode(resource)) # line 463 - except: # line 464 - Exit("Cannot reliably remove previous repository contents. 
Please remove .sos folder manually prior to going offline") # line 464 - m = Metadata(offline=True) # type: Metadata # line 465 - if '--strict' in options or m.c.strict: # always hash contents # line 466 - m.strict = True # always hash contents # line 466 - if '--compress' in options or m.c.compress: # plain file copies instead of compressed ones # line 467 - m.compress = True # plain file copies instead of compressed ones # line 467 - if '--picky' in options or m.c.picky: # Git-like # line 468 - m.picky = True # Git-like # line 468 - elif '--track' in options or m.c.track: # Svn-like # line 469 - m.track = True # Svn-like # line 469 - title = usage.getTitle() # type: _coconut.typing.Optional[str] # line 470 - if title: # line 471 - printo(title) # line 471 - if verbose: # line 472 - info(usage.MARKER + "Going offline...") # line 472 - m.createBranch(0, (defaults["defaultbranch"] if name is None else name), ("Offline repository created on %s" % strftime() if initialMessage is None else initialMessage)) # main branch's name may be None (e.g. for fossil) # line 473 - m.branch = 0 # line 474 - m.saveBranches(also={"version": version.__version__}) # stores version info only once. no change immediately after going offline, going back online won't issue a warning # line 475 - info(usage.MARKER + "Offline repository prepared. Use 'sos online' to finish offline work") # line 476 - -def online(options: '_coconut.typing.Sequence[str]'=[]): # line 478 - ''' Finish working offline. 
''' # line 479 - if verbose: # line 480 - info(usage.MARKER + "Going back online...") # line 480 - force = '--force' in options # type: bool # line 481 - m = Metadata() # type: Metadata # line 482 - strict = '--strict' in options or m.strict # type: bool # line 483 - m.loadBranches() # line 484 - if any([not b.inSync for b in m.branches.values()]) and not force: # line 485 - Exit("There are still unsynchronized (modified) branches.\nUse 'sos log' to list them.\nUse 'sos commit' and 'sos switch' to commit out-of-sync branches to your VCS before leaving offline mode.\nUse 'sos online --force' to erase all aggregated offline revisions") # line 485 - m.loadBranch(m.branch) # line 486 - maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # one commit guaranteed for first offline branch, for fast-branched branches a revision in branchinfo # line 487 - if options.count("--force") < 2: # line 488 - m.computeSequentialPathSet(m.branch, maxi) # load all commits up to specified revision # line 489 - changed, msg = m.findChanges(checkContent=strict, considerOnly=None if not (m.track or m.picky) else m.getTrackingPatterns(), dontConsider=None if not (m.track or m.picky) else m.getTrackingPatterns(negative=True), progress='--progress' in options) # HINT no option for --only/--except here on purpose. No check for picky here, because online is not a command that considers staged files (but we could use --only here, alternatively) # line 490 - if modified(changed): # line 491 - Exit("File tree is modified vs. current branch.\nUse 'sos online --force --force' to continue with removing the offline repository") # line 495 - try: # line 496 - shutil.rmtree(encode(metaFolder)) # line 496 - info("Exited offline mode. 
Continue working with your traditional VCS.") # line 496 - except Exception as E: # line 497 - Exit("Error removing offline repository: %r" % E) # line 497 - info(usage.MARKER + "Offline repository removed, you're back online") # line 498 - -def branch(name: '_coconut.typing.Optional[str]'=None, initialMessage: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[]): # line 500 +def offline(name: '_coconut.typing.Optional[str]'=None, initialMessage: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[]): # line 470 + ''' Initial command to start working offline. ''' # line 471 + if os.path.exists(encode(metaFolder)): # line 472 + if '--force' not in options: # line 473 + Exit("Repository folder is either already offline or older branches and commits were left over.\nUse 'sos online' to check for out-of-sync branches, or\nWipe existing offline branches with 'sos offline --force'") # line 473 + try: # line 474 + for entry in os.listdir(metaFolder): # line 475 + resource = metaFolder + os.sep + entry # line 476 + if os.path.isdir(resource): # line 477 + shutil.rmtree(encode(resource)) # line 477 + else: # line 478 + os.unlink(encode(resource)) # line 478 + except: # line 479 + Exit("Cannot reliably remove previous repository contents. 
Please remove .sos folder manually prior to going offline") # line 479 + m = Metadata(offline=True) # type: Metadata # line 480 + if '--strict' in options or m.c.strict: # always hash contents # line 481 + m.strict = True # always hash contents # line 481 + if '--compress' in options or m.c.compress: # plain file copies instead of compressed ones # line 482 + m.compress = True # plain file copies instead of compressed ones # line 482 + if '--picky' in options or m.c.picky: # Git-like # line 483 + m.picky = True # Git-like # line 483 + elif '--track' in options or m.c.track: # Svn-like # line 484 + m.track = True # Svn-like # line 484 + title = usage.getTitle() # type: _coconut.typing.Optional[str] # line 485 + if title: # line 486 + printo(title) # line 486 + if verbose: # line 487 + info(usage.MARKER + "Going offline...") # line 487 + m.createBranch(0, (defaults["defaultbranch"] if name is None else name), ("Offline repository created on %s" % strftime() if initialMessage is None else initialMessage)) # main branch's name may be None (e.g. for fossil) # line 488 + m.branch = 0 # line 489 + m.saveBranches(also={"version": version.__version__}) # stores version info only once. no change immediately after going offline, going back online won't issue a warning # line 490 + info(usage.MARKER + "Offline repository prepared. Use 'sos online' to finish offline work") # line 491 + +def online(options: '_coconut.typing.Sequence[str]'=[]): # line 493 + ''' Finish working offline. 
''' # line 494 + if verbose: # line 495 + info(usage.MARKER + "Going back online...") # line 495 + force = '--force' in options # type: bool # line 496 + m = Metadata() # type: Metadata # line 497 + strict = '--strict' in options or m.strict # type: bool # line 498 + m.loadBranches() # line 499 + if any([not b.inSync for b in m.branches.values()]) and not force: # line 500 + Exit("There are still unsynchronized (modified) branches.\nUse 'sos log' to list them.\nUse 'sos commit' and 'sos switch' to commit out-of-sync branches to your VCS before leaving offline mode.\nUse 'sos online --force' to erase all aggregated offline revisions") # line 500 + m.loadBranch(m.branch) # line 501 + maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # one commit guaranteed for first offline branch, for fast-branched branches a revision in branchinfo # line 502 + if options.count("--force") < 2: # line 503 + m.computeSequentialPathSet(m.branch, maxi) # load all commits up to specified revision # line 504 + changed, msg = m.findChanges(checkContent=strict, considerOnly=None if not (m.track or m.picky) else m.getTrackingPatterns(), dontConsider=None if not (m.track or m.picky) else m.getTrackingPatterns(negative=True), progress='--progress' in options) # HINT no option for --only/--except here on purpose. No check for picky here, because online is not a command that considers staged files (but we could use --only here, alternatively) # line 505 + if modified(changed): # line 506 + Exit("File tree is modified vs. current branch.\nUse 'sos online --force --force' to continue with removing the offline repository") # line 510 + try: # line 511 + shutil.rmtree(encode(metaFolder)) # line 511 + info("Exited offline mode. 
Continue working with your traditional VCS.") # line 511 + except Exception as E: # line 512 + Exit("Error removing offline repository: %r" % E) # line 512 + info(usage.MARKER + "Offline repository removed, you're back online") # line 513 + +def branch(name: '_coconut.typing.Optional[str]'=None, initialMessage: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[]): # line 515 ''' Create a new branch (from file tree or last revision) and (by default) continue working on it. Force not necessary, as either branching from last revision anyway, or branching file tree anyway. - ''' # line 503 - last = '--last' in options # type: bool # use last revision for branching, not current file tree # line 504 - stay = '--stay' in options # type: bool # continue on current branch after branching (don't switch) # line 505 - fast = '--fast' in options # type: bool # branch by referencing TODO move to default and use --full instead for old behavior # line 506 - m = Metadata() # type: Metadata # line 507 - m.loadBranch(m.branch) # line 508 - maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # line 509 - if name and m.getBranchByName(name) is not None: # attempted to create a named branch # line 510 - Exit("Branch '%s' already exists. Cannot proceed" % name) # attempted to create a named branch # line 510 - branch = max(m.branches.keys()) + 1 # next branch's key - this isn't atomic but we assume single-user non-concurrent use here # line 511 - if verbose: # line 512 - info(usage.MARKER + "Branching to %sbranch b%d%s%s..." 
% ("unnamed " if name is None else "", branch, " '%s'" % name if name is not None else "", " from last revision" if last else "")) # line 512 - if last: # branch from last revision # line 513 - m.duplicateBranch(branch, name, (initialMessage + " " if initialMessage else "") + "(Branched from b%d/r%02d)" % (m.branch, maxi), not fast) # branch from last revision # line 513 - else: # branch from current file tree state # line 514 - m.createBranch(branch, name, ("Branched from file tree after b%d/r%02d" % (m.branch, maxi) if initialMessage is None else initialMessage)) # branch from current file tree state # line 514 - if not stay: # line 515 - m.branch = branch # line 515 - m.saveBranches() # TODO or indent again? # line 516 - info(usage.MARKER + "%s new %sbranch b%d%s" % ("Continue work after branching" if stay else "Switched to", "unnamed " if name is None else "", branch, " '%s'" % name if name else "")) # line 517 - -def changes(argument: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None) -> 'ChangeSet': # line 519 - ''' Show changes of file tree vs. (last or specified) revision on current or specified branch. 
''' # line 520 - m = Metadata() # type: Metadata # line 521 - branch = None # type: _coconut.typing.Optional[int] # line 521 - revision = None # type: _coconut.typing.Optional[int] # line 521 - strict = '--strict' in options or m.strict # type: bool # line 522 - branch, revision = m.parseRevisionString(argument) # line 523 - if branch not in m.branches: # line 524 - Exit("Unknown branch") # line 524 - m.loadBranch(branch) # knows commits # line 525 - revision = m.correctNegativeIndexing(revision) # m.branches[branch].revision if not m.commits else (revision if revision >= 0 else max(m.commits) + 1 + revision) # negative indexing # line 526 + ''' # line 518 + last = '--last' in options # type: bool # use last revision for branching, not current file tree # line 519 + stay = '--stay' in options # type: bool # continue on current branch after branching (don't switch) # line 520 + fast = '--fast' in options # type: bool # branch by referencing TODO move to default and use --full instead for old behavior # line 521 + m = Metadata() # type: Metadata # line 522 + m.loadBranch(m.branch) # line 523 + maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # line 524 + if name and m.getBranchByName(name) is not None: # attempted to create a named branch # line 525 + Exit("Branch '%s' already exists. Cannot proceed" % name) # attempted to create a named branch # line 525 + branch = max(m.branches.keys()) + 1 # next branch's key - this isn't atomic but we assume single-user non-concurrent use here # line 526 if verbose: # line 527 - info(usage.MARKER + "Changes of file tree vs. 
revision '%s/r%02d'" % ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 527 - m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision # line 528 - changed, msg = m.findChanges(checkContent=strict, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, m.getTrackingPatterns() | m.getTrackingPatterns(branch)), dontConsider=excps if not (m.track or m.picky) else ((m.getTrackingPatterns(negative=True) | m.getTrackingPatterns(branch, negative=True)) if excps is None else excps), progress='--progress' in options) # line 529 - m.listChanges(changed, commitTime=m.commits[max(m.commits)].ctime if m.commits else time.time()) # line 534 - return changed # returning for unit tests only TODO remove? # line 535 - -def _diff(m: 'Metadata', branch: 'int', revision: 'int', changed: 'ChangeSet', ignoreWhitespace: 'bool', textWrap: 'bool'=False): # TODO introduce option to diff against committed revision # line 537 - ''' The diff display code. 
''' # line 538 - wrap = (lambda s: s) if textWrap else (lambda s: s[:termWidth]) # type: _coconut.typing.Callable[[str], str] # HINT since we don't know the actual width of unicode strings, we cannot be sure this is really maximizing horizontal space (like ljust), but probably not worth iteratively finding the right size # line 539 - onlyBinaryModifications = dataCopy(ChangeSet, changed, modifications={k: v for k, v in changed.modifications.items() if not m.isTextType(os.path.basename(k))}) # type: ChangeSet # line 540 - m.listChanges(onlyBinaryModifications, commitTime=m.commits[max(m.commits)].ctime) # only list modified binary files # line 541 - for path, pinfo in (c for c in changed.modifications.items() if m.isTextType(os.path.basename(c[0]))): # only consider modified text files # line 542 - content = b"" # type: _coconut.typing.Optional[bytes] # line 543 - if pinfo.size != 0: # versioned file # line 544 - content = m.restoreFile(None, branch, revision, pinfo) # versioned file # line 544 - assert content is not None # versioned file # line 544 - abspath = os.path.normpath(os.path.join(m.root, path.replace(SLASH, os.sep))) # type: str # current file # line 545 - blocks = None # type: List[MergeBlock] # line 546 - nl = None # type: bytes # line 546 - blocks, nl = merge(filename=abspath, into=content, diffOnly=True, ignoreWhitespace=ignoreWhitespace) # only determine change blocks # line 547 - printo("DIF %s%s %s" % (path, " " if len(blocks) == 1 and blocks[0].tipe == MergeBlockType.KEEP else "", NL_NAMES[nl])) # line 548 - linemax = pure.requiredDecimalDigits(max([block.line for block in blocks]) if len(blocks) > 0 else 1) # type: int # line 549 - for block in blocks: # line 550 + info(usage.MARKER + "Branching to %sbranch b%d%s%s..." 
% ("unnamed " if name is None else "", branch, " '%s'" % name if name is not None else "", " from last revision" if last else "")) # line 527 + if last: # branch from last revision # line 528 + m.duplicateBranch(branch, name, (initialMessage + " " if initialMessage else "") + "(Branched from b%d/r%02d)" % (m.branch, maxi), not fast) # branch from last revision # line 528 + else: # branch from current file tree state # line 529 + m.createBranch(branch, name, ("Branched from file tree after b%d/r%02d" % (m.branch, maxi) if initialMessage is None else initialMessage)) # branch from current file tree state # line 529 + if not stay: # line 530 + m.branch = branch # line 530 + m.saveBranches() # TODO or indent again? # line 531 + info(usage.MARKER + "%s new %sbranch b%d%s" % ("Continue work after branching" if stay else "Switched to", "unnamed " if name is None else "", branch, " '%s'" % name if name else "")) # line 532 + +def changes(argument: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None) -> 'ChangeSet': # line 534 + ''' Show changes of file tree vs. (last or specified) revision on current or specified branch. ''' # line 535 + m = Metadata() # type: Metadata # line 536 + branch = None # type: _coconut.typing.Optional[int] # line 536 + revision = None # type: _coconut.typing.Optional[int] # line 536 + strict = '--strict' in options or m.strict # type: bool # line 537 + branch, revision = m.parseRevisionString(argument) # line 538 + if branch not in m.branches: # line 539 + Exit("Unknown branch") # line 539 + m.loadBranch(branch) # knows commits # line 540 + revision = m.correctNegativeIndexing(revision) # m.branches[branch].revision if not m.commits else (revision if revision >= 0 else max(m.commits) + 1 + revision) # negative indexing # line 541 + if verbose: # line 542 + info(usage.MARKER + "Changes of file tree vs. 
revision '%s/r%02d'" % ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 542 + m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision # line 543 + changed, msg = m.findChanges(checkContent=strict, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, m.getTrackingPatterns() | m.getTrackingPatterns(branch)), dontConsider=excps if not (m.track or m.picky) else ((m.getTrackingPatterns(negative=True) | m.getTrackingPatterns(branch, negative=True)) if excps is None else excps), progress='--progress' in options) # line 544 + m.listChanges(changed, commitTime=m.commits[max(m.commits)].ctime if m.commits else time.time()) # line 549 + return changed # returning for unit tests only TODO remove? # line 550 + +def _diff(m: 'Metadata', branch: 'int', revision: 'int', changed: 'ChangeSet', ignoreWhitespace: 'bool', textWrap: 'bool'=False): # TODO introduce option to diff against committed revision # line 552 + ''' The diff display code. 
''' # line 553 + wrap = (lambda s: s) if textWrap else (lambda s: s[:termWidth]) # type: _coconut.typing.Callable[[str], str] # HINT since we don't know the actual width of unicode strings, we cannot be sure this is really maximizing horizontal space (like ljust), but probably not worth iteratively finding the right size # line 554 + onlyBinaryModifications = dataCopy(ChangeSet, changed, modifications={k: v for k, v in changed.modifications.items() if not m.isTextType(os.path.basename(k))}) # type: ChangeSet # line 555 + m.listChanges(onlyBinaryModifications, commitTime=m.commits[max(m.commits)].ctime) # only list modified binary files # line 556 + for path, pinfo in (c for c in changed.modifications.items() if m.isTextType(os.path.basename(c[0]))): # only consider modified text files # line 557 + content = b"" # type: _coconut.typing.Optional[bytes] # line 558 + if pinfo.size != 0: # versioned file # line 559 + content = m.restoreFile(None, branch, revision, pinfo) # versioned file # line 559 + assert content is not None # versioned file # line 559 + abspath = os.path.normpath(os.path.join(m.root, path.replace(SLASH, os.sep))) # type: str # current file # line 560 + blocks = None # type: List[MergeBlock] # line 561 + nl = None # type: bytes # line 561 + blocks, nl = merge(filename=abspath, into=content, diffOnly=True, ignoreWhitespace=ignoreWhitespace) # only determine change blocks # line 562 + printo("DIF %s%s %s" % (path, " " if len(blocks) == 1 and blocks[0].tipe == MergeBlockType.KEEP else "", NL_NAMES[nl])) # line 563 + linemax = pure.requiredDecimalDigits(max([block.line for block in blocks]) if len(blocks) > 0 else 1) # type: int # line 564 + for block in blocks: # line 565 # if block.tipe in [MergeBlockType.INSERT, MergeBlockType.REMOVE]: # pass # TODO print some previous and following lines - which aren't accessible here anymore - if block.tipe == MergeBlockType.INSERT: # TODO show color via (n)curses or other library? 
# line 553 - for no, line in enumerate(block.lines): # line 554 - printo(wrap("--- %%0%dd |%%s|" % linemax % (no + block.line, line))) # line 554 - elif block.tipe == MergeBlockType.REMOVE: # line 555 - for no, line in enumerate(block.lines): # line 556 - printo(wrap("+++ %%0%dd |%%s|" % linemax % (no + block.line, line))) # line 556 - elif block.tipe == MergeBlockType.REPLACE: # line 557 - for no, line in enumerate(block.replaces.lines): # line 558 - printo(wrap("-~- %%0%dd |%%s|" % linemax % (no + block.replaces.line, line))) # line 558 - for no, line in enumerate(block.lines): # line 559 - printo(wrap("+~+ %%0%dd |%%s|" % linemax % (no + block.line, line))) # line 559 + if block.tipe == MergeBlockType.INSERT: # TODO show color via (n)curses or other library? # line 568 + for no, line in enumerate(block.lines): # line 569 + printo(wrap("--- %%0%dd |%%s|" % linemax % (no + block.line, line))) # line 569 + elif block.tipe == MergeBlockType.REMOVE: # line 570 + for no, line in enumerate(block.lines): # line 571 + printo(wrap("+++ %%0%dd |%%s|" % linemax % (no + block.line, line))) # line 571 + elif block.tipe == MergeBlockType.REPLACE: # line 572 + for no, line in enumerate(block.replaces.lines): # line 573 + printo(wrap("-~- %%0%dd |%%s|" % linemax % (no + block.replaces.line, line))) # line 573 + for no, line in enumerate(block.lines): # line 574 + printo(wrap("+~+ %%0%dd |%%s|" % linemax % (no + block.line, line))) # line 574 # elif block.tipe == MergeBlockType.KEEP: pass # TODO allow to show kept stuff, or a part of pre-post lines # elif block.tipe == MergeBlockType.MOVE: # intra-line modifications - if block.tipe != MergeBlockType.KEEP: # line 562 - printo() # line 562 - -def diff(argument: 'str'="", options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 564 - ''' Show text file differences of file tree vs. 
(last or specified) revision on current or specified branch. ''' # line 565 - m = Metadata() # type: Metadata # line 566 - branch = None # type: _coconut.typing.Optional[int] # line 566 - revision = None # type: _coconut.typing.Optional[int] # line 566 - strict = '--strict' in options or m.strict # type: bool # line 567 - ignoreWhitespace = '--ignore-whitespace' in options or '--iw' in options # type: bool # line 568 - wrap = '--wrap' in options # type: bool # allow text to wrap around # line 569 - branch, revision = m.parseRevisionString(argument) # if nothing given, use last commit # line 570 - if branch not in m.branches: # line 571 - Exit("Unknown branch") # line 571 - m.loadBranch(branch) # knows commits # line 572 - revision = m.correctNegativeIndexing(revision) # m.branches[branch].revision if not m.commits else (revision if revision >= 0 else max(m.commits) + 1 + revision) # negative indexing # line 573 - if verbose: # line 574 - info(usage.MARKER + "Textual differences of file tree vs. 
revision '%s/r%02d'" % ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 574 - m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision # line 575 - changed, msg = m.findChanges(checkContent=strict, inverse=True, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, m.getTrackingPatterns() | m.getTrackingPatterns(branch)), dontConsider=excps if not (m.track or m.picky) else ((m.getTrackingPatterns(negative=True) | m.getTrackingPatterns(branch, negative=True)) if excps is None else excps), progress='--progress' in options) # line 576 - _diff(m, branch, revision, changed, ignoreWhitespace=ignoreWhitespace, textWrap=wrap) # line 581 - -def commit(argument: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 583 - ''' Create new revision from file tree changes vs. last commit. ''' # line 584 - m = Metadata() # type: Metadata # line 585 - if argument is not None and argument in m.tags: # line 586 - Exit("Illegal commit message. It was already used as a tag name") # line 586 - trackingPatterns = m.getTrackingPatterns() # type: FrozenSet[str] # SVN-like mode # line 587 + if block.tipe != MergeBlockType.KEEP: # line 577 + printo() # line 577 + +def diff(argument: 'str'="", options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 579 + ''' Show text file differences of file tree vs. (last or specified) revision on current or specified branch. 
''' # line 580 + m = Metadata() # type: Metadata # line 581 + branch = None # type: _coconut.typing.Optional[int] # line 581 + revision = None # type: _coconut.typing.Optional[int] # line 581 + strict = '--strict' in options or m.strict # type: bool # line 582 + ignoreWhitespace = '--ignore-whitespace' in options or '--iw' in options # type: bool # line 583 + wrap = '--wrap' in options # type: bool # allow text to wrap around # line 584 + branch, revision = m.parseRevisionString(argument) # if nothing given, use last commit # line 585 + if branch not in m.branches: # line 586 + Exit("Unknown branch") # line 586 + m.loadBranch(branch) # knows commits # line 587 + revision = m.correctNegativeIndexing(revision) # m.branches[branch].revision if not m.commits else (revision if revision >= 0 else max(m.commits) + 1 + revision) # negative indexing # line 588 + if verbose: # line 589 + info(usage.MARKER + "Textual differences of file tree vs. revision '%s/r%02d'" % ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 589 + m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision # line 590 + changed, msg = m.findChanges(checkContent=strict, inverse=True, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, m.getTrackingPatterns() | m.getTrackingPatterns(branch)), dontConsider=excps if not (m.track or m.picky) else ((m.getTrackingPatterns(negative=True) | m.getTrackingPatterns(branch, negative=True)) if excps is None else excps), progress='--progress' in options) # line 591 + _diff(m, branch, revision, changed, ignoreWhitespace=ignoreWhitespace, textWrap=wrap) # line 596 + +def commit(argument: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 
598 + ''' Create new revision from file tree changes vs. last commit. ''' # line 599 + m = Metadata() # type: Metadata # line 600 + if argument is not None and argument in m.tags: # line 601 + Exit("Illegal commit message. It was already used as a tag name") # line 601 + trackingPatterns = m.getTrackingPatterns() # type: FrozenSet[str] # SVN-like mode # line 602 # No untracking patterns needed here - if m.picky and not trackingPatterns: # line 589 - Exit("No file patterns staged for commit in picky mode") # line 589 - if verbose: # line 590 - info((lambda _coconut_none_coalesce_item: "b%d" % m.branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(usage.MARKER + "Committing changes to branch '%s'..." % m.branches[m.branch].name)) # line 590 - m, branch, revision, changed, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(None, options, check=False, commit=True, onlys=onlys, excps=excps) # special flag creates new revision for detected changes, but aborts if no changes # line 591 - changed = dataCopy(ChangeSet, changed, moves=detectMoves(changed)) # line 592 - m.paths = {k: v for k, v in changed.additions.items()} # copy to avoid wrong file numbers report below # line 593 - m.paths.update(changed.modifications) # update pathset to changeset only # line 594 - (m.paths.update)({k: dataCopy(PathInfo, v, size=None, hash=None) for k, v in changed.deletions.items()}) # line 595 - m.saveCommit(m.branch, revision) # revision has already been incremented # line 596 - m.commits[revision] = CommitInfo(number=revision, ctime=int(time.time() * 1000), message=argument) # comment can be None # line 597 - m.saveBranch(m.branch) # line 598 - m.loadBranches() # TODO is it necessary to load again? 
# line 599 - if m.picky: # remove tracked patterns # line 600 - m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], tracked=[], inSync=False) # remove tracked patterns # line 600 - else: # track or simple mode: set branch modified # line 601 - m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], inSync=False) # track or simple mode: set branch modified # line 601 - if "--tag" in options and argument is not None: # memorize unique tag # line 602 - m.tags.append(argument) # memorize unique tag # line 602 - info("Version was tagged with %s" % argument) # memorize unique tag # line 602 - m.saveBranches() # line 603 - printo(usage.MARKER + "Created new revision r%02d%s (+%02d/-%02d/%s%02d/%s%02d)" % (revision, ((" '%s'" % argument) if argument is not None else ""), len(changed.additions) - len(changed.moves), len(changed.deletions) - len(changed.moves), PLUSMINUS_SYMBOL if m.c.useUnicodeFont else "~", len(changed.modifications), MOVE_SYMBOL if m.c.useUnicodeFont else "#", len(changed.moves))) # line 604 - -def status(argument: '_coconut.typing.Optional[str]'=None, vcs: '_coconut.typing.Optional[str]'=None, cmd: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 606 - ''' Show branches and current repository state. 
''' # line 607 - m = Metadata() # type: Metadata # line 608 - if not (m.c.useChangesCommand or '--repo' in options): # line 609 - changes(argument, options, onlys, excps) # line 609 - return # line 609 - current = m.branch # type: int # line 610 - strict = '--strict' in options or m.strict # type: bool # line 611 - info(usage.MARKER + "Offline repository status") # line 612 - info("Repository root: %s" % os.getcwd()) # line 613 - info("Underlying VCS root: %s" % vcs) # line 614 - info("Underlying VCS type: %s" % cmd) # line 615 - info("Installation path: %s" % os.path.abspath(os.path.dirname(os.path.dirname(__file__)))) # because sos/sos.py # line 616 - info("Current SOS version: %s" % version.__version__) # line 617 - info("At creation version: %s" % m.version) # line 618 - info("Metadata format: %s" % m.format) # line 619 - info("Content checking: %sactivated" % ("" if m.strict else "de")) # line 620 - info("Data compression: %sactivated" % ("" if m.compress else "de")) # line 621 - info("Repository mode: %s" % ("track" if m.track else ("picky" if m.picky else "simple"))) # line 622 - info("Number of branches: %d" % len(m.branches)) # line 623 - trackingPatterns = m.getTrackingPatterns() # type: FrozenSet[str] # line 624 - untrackingPatterns = m.getTrackingPatterns(negative=True) # type: FrozenSet[str] # line 625 - m.loadBranch(current) # line 626 - maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # line 627 - m.computeSequentialPathSet(current, maxi) # load all commits up to specified revision # line 508 # line 628 - changed, _msg = m.findChanges(checkContent=strict, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingPatterns), dontConsider=excps if not (m.track or m.picky) else (untrackingPatterns if excps is None else excps), progress=True) # line 629 - printo("%s File tree %s" % ((CROSS_SYMBOL if m.c.useUnicodeFont else "!") if modified(changed) else (CHECKMARK_SYMBOL if 
m.c.useUnicodeFont else " "), "has changes" if modified(changed) else "is unchanged")) # TODO use other marks if no unicode console detected TODO bad choice of symbols for changed vs. unchanged # line 634 - sl = max([len((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(b.name)) for b in m.branches.values()]) # type: int # line 635 - for branch in sorted(m.branches.values(), key=lambda b: b.number): # line 636 - payload = 0 # type: int # count used storage per branch # line 637 - overhead = 0 # type: int # count used storage per branch # line 637 - original = 0 # type: int # count used storage per branch # line 637 - for dn, ds, fs in os.walk(branchFolder(branch.number)): # line 638 - for f in fs: # TODO count all backup folders as overhead instead? check "onlydeveloped" code for that logic # line 639 - if f == metaFile or f.endswith(BACKUP_SUFFIX): # line 640 - overhead += tryOrDefault(lambda _=None: os.stat(encode(os.path.join(dn, f))).st_size, 0) # line 640 - else: # line 641 - payload += tryOrDefault(lambda _=None: os.stat(encode(os.path.join(dn, f))).st_size, 0) # line 641 - pl_amount = float(payload) / MEBI # type: float # line 642 - oh_amount = float(overhead) / MEBI # type: float # line 642 + if m.picky and not trackingPatterns: # line 604 + Exit("No file patterns staged for commit in picky mode") # line 604 + if verbose: # line 605 + info((lambda _coconut_none_coalesce_item: "b%d" % m.branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(usage.MARKER + "Committing changes to branch '%s'..." 
% m.branches[m.branch].name)) # line 605 + m, branch, revision, changed, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(None, options, check=False, commit=True, onlys=onlys, excps=excps) # special flag creates new revision for detected changes, but aborts if no changes # line 606 + changed = dataCopy(ChangeSet, changed, moves=detectMoves(changed)) # line 607 + m.paths = {k: v for k, v in changed.additions.items()} # copy to avoid wrong file numbers report below # line 608 + m.paths.update(changed.modifications) # update pathset to changeset only # line 609 + (m.paths.update)({k: dataCopy(PathInfo, v, size=None, hash=None) for k, v in changed.deletions.items()}) # line 610 + m.saveCommit(m.branch, revision) # revision has already been incremented # line 611 + m.commits[revision] = CommitInfo(number=revision, ctime=int(time.time() * 1000), message=argument) # comment can be None # line 612 + m.saveBranch(m.branch) # line 613 + m.loadBranches() # TODO is it necessary to load again? 
# line 614 + if m.picky: # remove tracked patterns # line 615 + m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], tracked=[], inSync=False) # remove tracked patterns # line 615 + else: # track or simple mode: set branch modified # line 616 + m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], inSync=False) # track or simple mode: set branch modified # line 616 + if "--tag" in options and argument is not None: # memorize unique tag # line 617 + m.tags.append(argument) # memorize unique tag # line 617 + info("Version was tagged with %s" % argument) # memorize unique tag # line 617 + m.saveBranches() # line 618 + printo(usage.MARKER + "Created new revision r%02d%s (+%02d/-%02d/%s%02d/%s%02d)" % (revision, ((" '%s'" % argument) if argument is not None else ""), len(changed.additions) - len(changed.moves), len(changed.deletions) - len(changed.moves), PLUSMINUS_SYMBOL if m.c.useUnicodeFont else "~", len(changed.modifications), MOVE_SYMBOL if m.c.useUnicodeFont else "#", len(changed.moves))) # line 619 + +def status(argument: '_coconut.typing.Optional[str]'=None, vcs: '_coconut.typing.Optional[str]'=None, cmd: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 621 + ''' Show branches and current repository state. 
''' # line 622 + m = Metadata() # type: Metadata # line 623 + if not (m.c.useChangesCommand or '--repo' in options): # line 624 + changes(argument, options, onlys, excps) # line 624 + return # line 624 + current = m.branch # type: int # line 625 + strict = '--strict' in options or m.strict # type: bool # line 626 + info(usage.MARKER + "Offline repository status") # line 627 + info("Repository root: %s" % os.getcwd()) # line 628 + info("Underlying VCS root: %s" % vcs) # line 629 + info("Underlying VCS type: %s" % cmd) # line 630 + info("Installation path: %s" % os.path.abspath(os.path.dirname(os.path.dirname(__file__)))) # because sos/sos.py # line 631 + info("Current SOS version: %s" % version.__version__) # line 632 + info("At creation version: %s" % m.version) # line 633 + info("Metadata format: %s" % m.format) # line 634 + info("Content checking: %sactivated" % ("" if m.strict else "de")) # line 635 + info("Data compression: %sactivated" % ("" if m.compress else "de")) # line 636 + info("Repository mode: %s" % ("track" if m.track else ("picky" if m.picky else "simple"))) # line 637 + info("Number of branches: %d" % len(m.branches)) # line 638 + trackingPatterns = m.getTrackingPatterns() # type: FrozenSet[str] # line 639 + untrackingPatterns = m.getTrackingPatterns(negative=True) # type: FrozenSet[str] # line 640 + m.loadBranch(current) # line 641 + maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: _coconut.typing.Optional[int] # line 642 + if maxi is not None: # load all commits up to specified revision, except no commits # line 643 + m.computeSequentialPathSet(current, maxi) # load all commits up to specified revision, except no commits # line 643 + changed, _msg = m.findChanges(checkContent=strict, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingPatterns), dontConsider=excps if not (m.track or m.picky) else (untrackingPatterns if excps is None else excps), progress=True) # line 644 + 
printo("%s File tree %s" % ((CROSS_SYMBOL if m.c.useUnicodeFont else "!") if modified(changed) else (CHECKMARK_SYMBOL if m.c.useUnicodeFont else " "), "has changes" if modified(changed) else "is unchanged")) # TODO use other marks if no unicode console detected TODO bad choice of symbols for changed vs. unchanged # line 649 + sl = max([len((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(b.name)) for b in m.branches.values()]) # type: int # line 650 + for branch in sorted(m.branches.values(), key=lambda b: b.number): # line 651 + payload = 0 # type: int # count used storage per branch # line 652 + overhead = 0 # type: int # count used storage per branch # line 652 + original = 0 # type: int # count used storage per branch # line 652 + for dn, ds, fs in os.walk(branchFolder(branch.number)): # line 653 + for f in fs: # TODO count all backup folders as overhead instead? check "onlydeveloped" code for that logic # line 654 + if f == metaFile or f.endswith(BACKUP_SUFFIX): # line 655 + overhead += tryOrDefault(lambda _=None: os.stat(encode(os.path.join(dn, f))).st_size, 0) # line 655 + else: # line 656 + payload += tryOrDefault(lambda _=None: os.stat(encode(os.path.join(dn, f))).st_size, 0) # line 656 + pl_amount = float(payload) / MEBI # type: float # line 657 + oh_amount = float(overhead) / MEBI # type: float # line 657 # if pl_amount >= 1100.: convert to string - m.loadBranch(branch.number) # knows commit history # line 644 - for commit_ in range(1 + max(m.commits) if m.commits else 0): # line 645 - m.loadCommit(m.branch, commit_) # line 646 - for pinfo in m.paths.values(): # line 647 - original += (lambda _coconut_none_coalesce_item: 0 if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(pinfo.size) # line 647 - maxi = max(m.commits) if m.commits else m.branches[branch.number].revision # line 648 - printo(" %s b%d%s @%s (%s) with %d commits, using %.2f MiB (+%.3f%% SOS overhead%s)%s" 
% ("*" if current == branch.number else " ", branch.number, ((" %%%ds" % (sl + 2)) % ("'%s'" % branch.name)) if branch.name else "", strftime(branch.ctime), "in sync" if branch.inSync else "modified", len(m.commits), pl_amount + oh_amount, oh_amount * 100. / (pl_amount + oh_amount), ", %s compression/deduplication" % (("%.2f%s" % (float(original) / float(payload), MULT_SYMBOL if m.c.useUnicodeFont else "x")) if payload > 0 else "full") if m.compress or (len(m.commits) > 0 and len(m.commits) != max(m.commits) + 1) else "", (". Last comment: '%s'" % m.commits[maxi].message) if maxi in m.commits and m.commits[maxi].message else "")) # line 649 - if m.track or m.picky and (len(m.branches[m.branch].tracked) > 0 or len(m.branches[m.branch].untracked) > 0): # line 650 - info("\nTracked file patterns:") # TODO print matching untracking patterns side-by-side # line 651 - printo(ajoin(" | ", m.branches[m.branch].tracked, "\n")) # line 652 - info("\nUntracked file patterns:") # line 653 - printo(ajoin(" | ", m.branches[m.branch].untracked, "\n")) # line 654 - -def exitOnChanges(argument: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], check: 'bool'=True, commit: 'bool'=False, onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None) -> 'Tuple[Metadata, _coconut.typing.Optional[int], int, ChangeSet, bool, bool, FrozenSet[str], FrozenSet[str]]': # line 656 + m.loadBranch(branch.number) # knows commit history # line 659 + for commit_ in range(1 + max(m.commits) if m.commits else 0): # line 660 + m.loadCommit(m.branch, commit_) # line 661 + for pinfo in m.paths.values(): # line 662 + original += (lambda _coconut_none_coalesce_item: 0 if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(pinfo.size) # line 662 + maxi = max(m.commits) if m.commits else m.branches[branch.number].revision # line 663 + printo(" %s b%d%s @%s (%s) with %d commits, using %.2f MiB (+%.3f%% SOS 
overhead%s)%s" % ("*" if current == branch.number else " ", branch.number, ((" %%%ds" % (sl + 2)) % (("'%s'" % branch.name) if branch.name else "")), strftime(branch.ctime), "in sync" if branch.inSync else "modified", len(m.commits), pl_amount + oh_amount, oh_amount * 100. / (pl_amount + oh_amount), ", %s compression/deduplication" % (("%.2f%s" % (float(original) / float(payload), MULT_SYMBOL if m.c.useUnicodeFont else "x")) if payload > 0 else "full") if m.compress or (len(m.commits) > 0 and len(m.commits) != max(m.commits) + 1) else "", (". Last comment: '%s'" % m.commits[maxi].message) if maxi in m.commits and m.commits[maxi].message else "")) # line 664 + if m.track or m.picky and (len(m.branches[m.branch].tracked) > 0 or len(m.branches[m.branch].untracked) > 0): # line 665 + info("\nTracked file patterns:") # TODO print matching untracking patterns side-by-side # line 666 + printo(ajoin(" | ", m.branches[m.branch].tracked, "\n")) # line 667 + info("\nUntracked file patterns:") # line 668 + printo(ajoin(" | ", m.branches[m.branch].untracked, "\n")) # line 669 + +def exitOnChanges(argument: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[], check: 'bool'=True, commit: 'bool'=False, onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None) -> 'Tuple[Metadata, _coconut.typing.Optional[int], int, ChangeSet, bool, bool, FrozenSet[str], FrozenSet[str]]': # line 671 ''' Common behavior for switch, update, delete, commit. Should not be called for picky mode, unless tracking patterns were already added. argument: optional branch/revision, used only in switch and update check: stop program on detected change (default yes) commit: don't stop on changes and write to file system Returns (Metadata, (current or target) branch, revision, set of changes vs. last commit on current branch, strict, force flags. 
- ''' # line 663 - assert not (check and commit) # line 664 - m = Metadata() # type: Metadata # line 665 - force = '--force' in options # type: bool # line 666 - strict = '--strict' in options or m.strict # type: bool # line 667 - if argument is not None: # line 668 - branch, revision = m.parseRevisionString(argument) # for early abort # line 669 - if branch is None: # line 670 - Exit("Branch '%s' doesn't exist. Cannot proceed" % argument) # line 670 - m.loadBranch(m.branch) # knows last commits of *current* branch # line 671 - maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # line 672 + ''' # line 678 + assert not (check and commit) # line 679 + m = Metadata() # type: Metadata # line 680 + force = '--force' in options # type: bool # line 681 + strict = '--strict' in options or m.strict # type: bool # line 682 + if argument is not None: # line 683 + branch, revision = m.parseRevisionString(argument) # for early abort # line 684 + if branch is None: # line 685 + Exit("Branch '%s' doesn't exist. 
Cannot proceed" % argument) # line 685 + m.loadBranch(m.branch) # knows last commits of *current* branch # line 686 + maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # line 687 # Determine current changes - trackingPatterns = m.getTrackingPatterns() # type: FrozenSet[str] # line 675 - untrackingPatterns = m.getTrackingPatterns(negative=True) # type: FrozenSet[str] # line 676 - m.computeSequentialPathSet(m.branch, maxi) # load all commits up to specified revision # line 677 - changed, msg = m.findChanges(m.branch if commit else None, maxi + 1 if commit else None, checkContent=strict, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingPatterns), dontConsider=excps if not (m.track or m.picky) else (untrackingPatterns if excps is None else excps), progress='--progress' in options) # line 678 - if check and modified(changed) and not force: # line 683 - m.listChanges(changed, commitTime=m.commits[max(m.commits)].ctime if m.commits else 0) # line 684 - Exit("File tree contains changes. 
Use --force to proceed") # line 685 - elif commit: # line 686 - if not modified(changed) and not force: # line 687 - Exit("Nothing to commit") # line 687 - m.listChanges(changed, commitTime=m.commits[max(m.commits)].ctime if m.commits else 0) # line 688 - if msg: # line 689 - printo(msg) # line 689 - - if argument is not None: # branch/revision specified # line 691 - m.loadBranch(branch) # knows commits of target branch # line 692 - maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # line 693 - revision = m.correctNegativeIndexing(revision) # line 694 - return (m, branch, revision, changed, strict, force, m.getTrackingPatterns(branch), m.getTrackingPatterns(branch, negative=True)) # line 695 - return (m, m.branch, maxi + (1 if commit else 0), changed, strict, force, trackingPatterns, untrackingPatterns) # line 696 - -def switch(argument: 'str', options: 'List[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 698 - ''' Continue work on another branch, replacing file tree changes. 
''' # line 699 - m, branch, revision, changed, strict, _force, trackingPatterns, untrackingPatterns = exitOnChanges(argument, ["--force"] + options) # force continuation to delay check to this function # line 700 - force = '--force' in options # type: bool # needed as we fake force in above access # line 701 + trackingPatterns = m.getTrackingPatterns() # type: FrozenSet[str] # line 690 + untrackingPatterns = m.getTrackingPatterns(negative=True) # type: FrozenSet[str] # line 691 + m.computeSequentialPathSet(m.branch, maxi) # load all commits up to specified revision # line 692 + changed, msg = m.findChanges(m.branch if commit else None, maxi + 1 if commit else None, checkContent=strict, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingPatterns), dontConsider=excps if not (m.track or m.picky) else (untrackingPatterns if excps is None else excps), progress='--progress' in options) # line 693 + if check and modified(changed) and not force: # line 698 + m.listChanges(changed, commitTime=m.commits[max(m.commits)].ctime if m.commits else 0) # line 699 + Exit("File tree contains changes. 
Use --force to proceed") # line 700 + elif commit: # line 701 + if not modified(changed) and not force: # line 702 + Exit("Nothing to commit") # line 702 + m.listChanges(changed, commitTime=m.commits[max(m.commits)].ctime if m.commits else 0) # line 703 + if msg: # line 704 + printo(msg) # line 704 + + if argument is not None: # branch/revision specified # line 706 + m.loadBranch(branch) # knows commits of target branch # line 707 + maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # line 708 + revision = m.correctNegativeIndexing(revision) # line 709 + return (m, branch, revision, changed, strict, force, m.getTrackingPatterns(branch), m.getTrackingPatterns(branch, negative=True)) # line 710 + return (m, m.branch, maxi + (1 if commit else 0), changed, strict, force, trackingPatterns, untrackingPatterns) # line 711 + +def switch(argument: 'str', options: 'List[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 713 + ''' Continue work on another branch, replacing file tree changes. 
''' # line 714 + m, branch, revision, changed, strict, _force, trackingPatterns, untrackingPatterns = exitOnChanges(argument, ["--force"] + options) # force continuation to delay check to this function # line 715 + force = '--force' in options # type: bool # needed as we fake force in above access # line 716 # Determine file changes from other branch to current file tree - if '--meta' in options: # only switch meta data # line 704 - m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], tracked=m.branches[branch].tracked, untracked=m.branches[branch].untracked) # line 705 - else: # full file switch # line 706 - m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision for target branch into memory # line 707 - todos, _msg = m.findChanges(checkContent=strict, inverse=True, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingPatterns | m.getTrackingPatterns(branch)), dontConsider=excps if not (m.track or m.picky) else ((untrackingPatterns | m.getTrackingPatterns(branch, negative=True)) if excps is None else excps), progress='--progress' in options) # determine difference of other branch vs. 
file tree (forced or in sync with current branch; "addition" means exists now and should be removed) # line 708 + if '--meta' in options: # only switch meta data # line 719 + m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], tracked=m.branches[branch].tracked, untracked=m.branches[branch].untracked) # line 720 + else: # full file switch # line 721 + m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision for target branch into memory # line 722 + todos, _msg = m.findChanges(checkContent=strict, inverse=True, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingPatterns | m.getTrackingPatterns(branch)), dontConsider=excps if not (m.track or m.picky) else ((untrackingPatterns | m.getTrackingPatterns(branch, negative=True)) if excps is None else excps), progress='--progress' in options) # determine difference of other branch vs. file tree (forced or in sync with current branch; "addition" means exists now and should be removed) # line 723 # Now check for potential conflicts - changed.deletions.clear() # local deletions never create conflicts, modifications always # line 715 - rms = [] # type: _coconut.typing.Sequence[str] # local additions can be ignored if restoration from switch would be same # line 716 - for a, pinfo in changed.additions.items(): # has potential corresponding re-add in switch operation: # line 717 - if a in todos.deletions and pinfo.size == todos.deletions[a].size and (pinfo.hash == todos.deletions[a].hash if m.strict else pinfo.mtime == todos.deletions[a].mtime): # line 718 - rms.append(a) # line 718 - for rm in rms: # TODO could also silently accept remote DEL for local ADD # line 719 - del changed.additions[rm] # TODO could also silently accept remote DEL for local ADD # line 719 - if modified(changed) and not force: # line 720 - m.listChanges(changed) # line 720 - Exit("File tree contains changes. 
Use --force to proceed") # line 720 - if verbose: # line 721 - info(usage.MARKER + "Switching to branch %sb%d/r%02d..." % ("'%s' " % m.branches[branch].name if m.branches[branch].name else "", branch, revision)) # line 721 - if not modified(todos): # line 722 - info("No changes to current file tree") # line 723 - else: # integration required # line 724 - for path, pinfo in todos.deletions.items(): # line 725 - m.restoreFile(path, branch, revision, pinfo, ensurePath=True) # is deleted in current file tree: restore from branch to reach target state # line 726 - printo("ADD " + path) # line 727 - for path, pinfo in todos.additions.items(): # line 728 - os.unlink(encode(os.path.join(m.root, path.replace(SLASH, os.sep)))) # is added in current file tree: remove from branch to reach target state # line 729 - printo("DEL " + path) # line 730 - for path, pinfo in todos.modifications.items(): # line 731 - m.restoreFile(path, branch, revision, pinfo) # is modified in current file tree: restore from branch to reach target # line 732 - printo("MOD " + path) # line 733 - m.branch = branch # line 734 - m.saveBranches() # store switched path info # line 735 - info(usage.MARKER + "Switched to branch %sb%d/r%02d" % ("'%s' " % (m.branches[branch].name if m.branches[branch].name else ""), branch, revision)) # line 736 - -def update(argument: 'str', options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 738 + changed.deletions.clear() # local deletions never create conflicts, modifications always # line 730 + rms = [] # type: _coconut.typing.Sequence[str] # local additions can be ignored if restoration from switch would be same # line 731 + for a, pinfo in changed.additions.items(): # has potential corresponding re-add in switch operation: # line 732 + if a in todos.deletions and pinfo.size == todos.deletions[a].size and (pinfo.hash == todos.deletions[a].hash if m.strict else 
pinfo.mtime == todos.deletions[a].mtime): # line 733 + rms.append(a) # line 733 + for rm in rms: # TODO could also silently accept remote DEL for local ADD # line 734 + del changed.additions[rm] # TODO could also silently accept remote DEL for local ADD # line 734 + if modified(changed) and not force: # line 735 + m.listChanges(changed) # line 735 + Exit("File tree contains changes. Use --force to proceed") # line 735 + if verbose: # line 736 + info(usage.MARKER + "Switching to branch %sb%d/r%02d..." % ("'%s' " % m.branches[branch].name if m.branches[branch].name else "", branch, revision)) # line 736 + if not modified(todos): # line 737 + info("No changes to current file tree") # line 738 + else: # integration required # line 739 + for path, pinfo in todos.deletions.items(): # line 740 + m.restoreFile(path, branch, revision, pinfo, ensurePath=True) # is deleted in current file tree: restore from branch to reach target state # line 741 + printo("ADD " + path) # line 742 + for path, pinfo in todos.additions.items(): # line 743 + os.unlink(encode(os.path.join(m.root, path.replace(SLASH, os.sep)))) # is added in current file tree: remove from branch to reach target state # line 744 + printo("DEL " + path) # line 745 + for path, pinfo in todos.modifications.items(): # line 746 + m.restoreFile(path, branch, revision, pinfo) # is modified in current file tree: restore from branch to reach target # line 747 + printo("MOD " + path) # line 748 + m.branch = branch # line 749 + m.saveBranches() # store switched path info # line 750 + info(usage.MARKER + "Switched to branch %sb%d/r%02d" % ("'%s' " % (m.branches[branch].name if m.branches[branch].name else ""), branch, revision)) # line 751 + +def update(argument: 'str', options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 753 ''' Load and integrate a specified other branch/revision into current life file tree. 
In tracking mode, this also updates the set of tracked patterns. User options for merge operation: --add/--rm/--ask --add-lines/--rm-lines/--ask-lines (inside each file), --add-chars/--rm-chars/--ask-chars - ''' # line 742 - mrg = getAnyOfMap({"--add": MergeOperation.INSERT, "--rm": MergeOperation.REMOVE, "--ask": MergeOperation.ASK}, options, MergeOperation.BOTH) # type: MergeOperation # default operation is replicate remote state # line 743 - mrgline = getAnyOfMap({'--add-lines': MergeOperation.INSERT, '--rm-lines': MergeOperation.REMOVE, "--ask-lines": MergeOperation.ASK}, options, mrg) # type: MergeOperation # default operation for modified files is same as for files # line 744 - mrgchar = getAnyOfMap({'--add-chars': MergeOperation.INSERT, '--rm-chars': MergeOperation.REMOVE, "--ask-chars": MergeOperation.ASK}, options, mrgline) # type: MergeOperation # default operation for modified files is same as for lines # line 745 - eol = '--eol' in options # type: bool # use remote eol style # line 746 - m = Metadata() # type: Metadata # TODO same is called inside stop on changes - could return both current and designated branch instead # line 747 - currentBranch = m.branch # type: _coconut.typing.Optional[int] # line 748 - m, branch, revision, changes_, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(argument, options, check=False, onlys=onlys, excps=excps) # don't check for current changes, only parse arguments # line 749 - if verbose: # line 750 - info(usage.MARKER + "Integrating changes from '%s/r%02d' into file tree..." 
% ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 750 + ''' # line 757 + mrg = getAnyOfMap({"--add": MergeOperation.INSERT, "--rm": MergeOperation.REMOVE, "--ask": MergeOperation.ASK}, options, MergeOperation.BOTH) # type: MergeOperation # default operation is replicate remote state # line 758 + mrgline = getAnyOfMap({'--add-lines': MergeOperation.INSERT, '--rm-lines': MergeOperation.REMOVE, "--ask-lines": MergeOperation.ASK}, options, mrg) # type: MergeOperation # default operation for modified files is same as for files # line 759 + mrgchar = getAnyOfMap({'--add-chars': MergeOperation.INSERT, '--rm-chars': MergeOperation.REMOVE, "--ask-chars": MergeOperation.ASK}, options, mrgline) # type: MergeOperation # default operation for modified files is same as for lines # line 760 + eol = '--eol' in options # type: bool # use remote eol style # line 761 + m = Metadata() # type: Metadata # TODO same is called inside stop on changes - could return both current and designated branch instead # line 762 + currentBranch = m.branch # type: _coconut.typing.Optional[int] # line 763 + m, branch, revision, changes_, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(argument, options, check=False, onlys=onlys, excps=excps) # don't check for current changes, only parse arguments # line 764 + if verbose: # line 765 + info(usage.MARKER + "Integrating changes from '%s/r%02d' into file tree..." 
% ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 765 # Determine file changes from other branch over current file tree - m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision for branch to integrate # line 753 - trackingUnion = trackingPatterns | m.getTrackingPatterns(branch) # type: FrozenSet[str] # line 754 - untrackingUnion = untrackingPatterns | m.getTrackingPatterns(branch, negative=True) # type: FrozenSet[str] # line 755 - changed, _msg = m.findChanges(checkContent=strict, inverse=True, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingUnion), dontConsider=excps if not (m.track or m.picky) else (untrackingUnion if onlys is None else onlys), progress='--progress' in options) # determine difference of other branch vs. file tree. "addition" means exists now but not in other, and should be removed unless in tracking mode # line 756 - if mrg != MergeOperation.ASK and not changed.modifications and not (mrg.value & MergeOperation.INSERT.value and changed.additions or (mrg.value & MergeOperation.REMOVE.value and changed.deletions)): # no file ops, TODO ASK handling is clumsy here # line 761 - if trackingUnion != trackingPatterns: # nothing added # line 762 - info("No file changes detected, but tracking patterns were merged (run 'sos switch /-1 --meta' to undo)") # TODO write test to see if this works # line 763 - else: # line 764 - info("Nothing to update") # but write back updated branch info below # line 765 - else: # integration required # line 766 - add_all = None # type: _coconut.typing.Optional[str] # line 767 - del_all = None # type: _coconut.typing.Optional[str] # line 767 - selection = None # type: str # line 767 - if changed.deletions.items(): # line 768 - printo("Additions:") # line 768 - for path, pinfo in changed.deletions.items(): # file-based update. 
Deletions mark files not present in current file tree -> needs addition! # line 769 - selection = "y" if mrg.value & MergeOperation.INSERT.value else "n" # default for non-ask case # line 770 - if add_all is None and mrg == MergeOperation.ASK: # line 771 - selection = user_input(" Restore %r? *[Y]es, [N]o, yes to [A]ll, n[O] to all: " % path, "ynao", "y") # line 772 - if selection in "ao": # line 773 - add_all = "y" if selection == "a" else "n" # line 773 - selection = add_all # line 773 - if "y" in (add_all, selection): # deleted in current file tree: restore from branch to reach target # line 774 - m.restoreFile(path, branch, revision, pinfo, ensurePath=True) # deleted in current file tree: restore from branch to reach target # line 774 - printo(("ADD " if "y" in (add_all, selection) else "(A) ") + path) # TODO document (A) as "selected not to add by user choice" # line 775 - if changed.additions.items(): # line 776 - printo("Deletions:") # line 776 - for path, pinfo in changed.additions.items(): # line 777 - if m.track or m.picky: # because untracked files of other branch cannot be detected (which is good) # line 778 - Exit("This should never happen. Please create an issue report on Github") # because untracked files of other branch cannot be detected (which is good) # line 778 - selection = "y" if mrg.value & MergeOperation.REMOVE.value else "n" # line 779 - if del_all is None and mrg == MergeOperation.ASK: # line 780 - selection = user_input(" Delete %r? 
*[Y]es, [N]o, yes to [A]ll, n[O] to all: " % path, "ynao", "y") # line 781 - if selection in "ao": # line 782 - del_all = "y" if selection == "a" else "n" # line 782 - selection = del_all # line 782 - if "y" in (del_all, selection): # line 783 - os.unlink(encode(m.root + os.sep + path.replace(SLASH, os.sep))) # line 783 - printo(("DEL " if "y" in (del_all, selection) else "(D) ") + path) # not contained in other branch, but maybe kept # line 784 - if changed.modifications.items(): # line 785 - printo("Modifications:") # line 785 - for path, pinfo in changed.modifications.items(): # line 786 - into = os.path.normpath(os.path.join(m.root, path.replace(SLASH, os.sep))) # type: str # line 787 - binary = not m.isTextType(path) # type: bool # line 788 - op = "m" # type: str # merge as default for text files, always asks for binary (TODO unless --theirs or --mine) # line 789 - if mrg == MergeOperation.ASK or binary: # TODO this may ask user even if no interaction was asked for # line 790 - printo(("MOD " if not binary else "BIN ") + path) # TODO print mtime, size differences? 
# line 791 - op = user_input(" Resolve %r: *M[I]ne (skip), [T]heirs" % into + (": " if binary else ", [M]erge: "), "it" if binary else "itm", "i") # line 792 - if op == "t": # line 793 - printo("THR " + path) # blockwise copy of contents # line 794 - m.readOrCopyVersionedFile(branch, revision, pinfo.nameHash, toFile=into) # blockwise copy of contents # line 794 - elif op == "m": # line 795 - with open(encode(into), "rb") as fd: # TODO slurps current file # line 796 - current = fd.read() # type: bytes # TODO slurps current file # line 796 - file = m.readOrCopyVersionedFile(branch, revision, pinfo.nameHash) if pinfo.size > 0 else b'' # type: _coconut.typing.Optional[bytes] # parse lines # line 797 - if current == file and verbose: # line 798 - info("No difference to versioned file") # line 798 - elif file is not None: # if None, error message was already logged # line 799 - merged = None # type: bytes # line 800 - nl = None # type: bytes # line 800 - merged, nl = merge(file=file, into=current, mergeOperation=mrgline, charMergeOperation=mrgchar, eol=eol) # line 801 - if merged != current: # line 802 - with open(encode(path), "wb") as fd: # TODO write to temp file first, in case writing fails # line 803 - fd.write(merged) # TODO write to temp file first, in case writing fails # line 803 - elif verbose: # TODO but update timestamp? # line 804 - info("No change") # TODO but update timestamp? # line 804 - else: # mine or wrong input # line 805 - printo("MNE " + path) # nothing to do! 
same as skip # line 806 - info(usage.MARKER + "Integrated changes from '%s/r%02d' into file tree" % ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 807 - m.branches[currentBranch] = dataCopy(BranchInfo, m.branches[currentBranch], inSync=False, tracked=list(trackingUnion)) # line 808 - m.branch = currentBranch # need to restore setting before saving TODO operate on different objects instead # line 809 - m.saveBranches() # line 810 - -def destroy(argument: 'str', options: '_coconut.typing.Sequence[str]'=[]): # line 812 - ''' Remove a branch entirely. ''' # line 813 - m, branch, revision, changed, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(None, options) # line 814 - if len(m.branches) == 1: # line 815 - Exit("Cannot remove the only remaining branch. Use 'sos online' to leave offline mode") # line 815 - branch, revision = m.parseRevisionString(argument) # not from exitOnChanges, because we have to set argument to None there # line 816 - if branch is None or branch not in m.branches: # line 817 - Exit("Cannot delete unknown branch %r" % branch) # line 817 - if verbose: # line 818 - info(usage.MARKER + "Removing branch b%d%s..." % (branch, " '%s'" % ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name)))) # line 818 - binfo = m.removeBranch(branch) # need to keep a reference to removed entry for output below # line 819 - info(usage.MARKER + "Branch b%d%s removed" % (branch, " '%s'" % ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(binfo.name)))) # line 820 - -def add(relPath: 'str', pattern: 'str', options: '_coconut.typing.Sequence[str]'=[], negative: 'bool'=False): # line 822 - ''' Add a tracked files pattern to current branch's tracked files. 
negative means tracking blacklisting. ''' # line 823 - force = '--force' in options # type: bool # line 824 - m = Metadata() # type: Metadata # line 825 - if not (m.track or m.picky): # line 826 - Exit("Repository is in simple mode. Create offline repositories via 'sos offline --track' or 'sos offline --picky' or configure a user-wide default via 'sos config track on'") # line 826 - patterns = m.branches[m.branch].untracked if negative else m.branches[m.branch].tracked # type: List[str] # line 827 - if pattern in patterns: # line 828 - Exit("Pattern '%s' already tracked" % pattern) # line 828 - if not force and not os.path.exists(encode(relPath.replace(SLASH, os.sep))): # line 829 - Exit("The pattern folder doesn't exist. Use --force to add the file pattern anyway") # line 829 - if not force and len(fnmatch.filter(os.listdir(os.path.abspath(relPath.replace(SLASH, os.sep))), os.path.basename(pattern.replace(SLASH, os.sep)))) == 0: # doesn't match any current file # line 830 - Exit("Pattern doesn't match any file in specified folder. Use --force to add it anyway") # line 831 - patterns.append(pattern) # line 832 - m.saveBranches() # line 833 - info(usage.MARKER + "Added tracking pattern '%s' for folder '%s'" % (os.path.basename(pattern.replace(SLASH, os.sep)), os.path.abspath(relPath))) # line 834 - -def remove(relPath: 'str', pattern: 'str', negative: 'bool'=False): # line 836 - ''' Remove a tracked files pattern from current branch's tracked files. ''' # line 837 - m = Metadata() # type: Metadata # line 838 - if not (m.track or m.picky): # line 839 - Exit("Repository is in simple mode. 
Needs 'offline --track' or 'offline --picky' instead") # line 839 - patterns = m.branches[m.branch].untracked if negative else m.branches[m.branch].tracked # type: List[str] # line 840 - if pattern not in patterns: # line 841 - suggestion = _coconut.set() # type: Set[str] # line 842 - for pat in patterns: # line 843 - if fnmatch.fnmatch(pattern, pat): # line 843 - suggestion.add(pat) # line 843 - if suggestion: # TODO use same wording as in move # line 844 - printo("Do you mean any of the following tracked file patterns? '%s'" % (", ".join(sorted(suggestion)))) # TODO use same wording as in move # line 844 - Exit("Tracked pattern '%s' not found" % pattern) # line 845 - patterns.remove(pattern) # line 846 - m.saveBranches() # line 847 - info(usage.MARKER + "Removed tracking pattern '%s' for folder '%s'" % (os.path.basename(pattern), os.path.abspath(relPath.replace(SLASH, os.sep)))) # line 848 - -def ls(folder: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[]): # line 850 - ''' List specified directory, augmenting with repository metadata. ''' # line 851 - m = Metadata() # type: Metadata # line 852 - folder = (os.getcwd() if folder is None else folder) # line 853 - if '--all' in options: # always start at SOS repo root with --all # line 854 - folder = m.root # always start at SOS repo root with --all # line 854 - recursive = '--recursive' in options or '-r' in options or '--all' in options # type: bool # line 855 - patterns = '--patterns' in options or '-p' in options # type: bool # line 856 - DOT = (DOT_SYMBOL if m.c.useUnicodeFont else " ") * 3 # type: str # TODO or "."? 
# line 857 - if verbose: # line 858 - info(usage.MARKER + "Repository is in %s mode" % ("tracking" if m.track else ("picky" if m.picky else "simple"))) # line 858 - relPath = relativize(m.root, os.path.join(folder, "-"))[0] # type: str # line 859 - if relPath.startswith(os.pardir): # line 860 - Exit("Cannot list contents of folder outside offline repository") # line 860 - trackingPatterns = m.getTrackingPatterns() if m.track or m.picky else _coconut.frozenset() # type: _coconut.typing.Optional[FrozenSet[str]] # for current branch # line 861 - untrackingPatterns = m.getTrackingPatterns(negative=True) if m.track or m.picky else _coconut.frozenset() # type: _coconut.typing.Optional[FrozenSet[str]] # for current branch # line 862 - if '--tags' in options: # TODO this has nothing to do with "ls" - it's an entirely different command. Move if something like "sos tag" has been implemented # line 863 - if len(m.tags) > 0: # line 864 - printo(ajoin("TAG ", sorted(m.tags), nl="\n")) # line 864 - return # line 865 - for dirpath, dirnames, _filenames in os.walk(folder): # line 866 - if not recursive: # avoid recursion # line 867 - dirnames.clear() # avoid recursion # line 867 - dirnames[:] = sorted([decode(d) for d in dirnames]) # line 868 - dirnames[:] = [d for d in dirnames if len([n for n in m.c.ignoreDirs if fnmatch.fnmatch(d, n)]) == 0 or len([p for p in m.c.ignoreDirsWhitelist if fnmatch.fnmatch(d, p)]) > 0] # global ignores # line 869 - - folder = decode(dirpath) # line 871 - relPath = relativize(m.root, os.path.join(folder, "-"))[0] # line 872 - if patterns: # line 873 - out = ajoin("TRK ", [os.path.basename(p) for p in trackingPatterns if os.path.dirname(p).replace(os.sep, SLASH) == relPath], nl="\n") # type: str # line 874 - if out: # line 875 - printo("DIR %s\n" % relPath + out) # line 875 - continue # with next folder # line 876 - files = list(sorted((entry for entry in os.listdir(folder) if os.path.isfile(os.path.join(folder, entry))))) # type: List[str] # line 877 
- if len(files) > 0: # line 878 - printo("DIR %s" % relPath) # line 878 - for file in files: # for each file list all tracking patterns that match, or none (e.g. in picky mode after commit) # line 879 - ignore = None # type: _coconut.typing.Optional[str] # line 880 - for ig in m.c.ignores: # remember first match # line 881 - if fnmatch.fnmatch(file, ig): # remember first match # line 881 - ignore = ig # remember first match # line 881 - break # remember first match # line 881 - if ignore: # found a white list entry for ignored file, undo ignoring it # line 882 - for wl in m.c.ignoresWhitelist: # found a white list entry for ignored file, undo ignoring it # line 882 - if fnmatch.fnmatch(file, wl): # found a white list entry for ignored file, undo ignoring it # line 882 - ignore = None # found a white list entry for ignored file, undo ignoring it # line 882 - break # found a white list entry for ignored file, undo ignoring it # line 882 - matches = [] # type: List[str] # line 883 - if not ignore: # line 884 - for pattern in (p for p in trackingPatterns if os.path.dirname(p).replace(os.sep, SLASH) == relPath): # only patterns matching current folder # line 885 - if fnmatch.fnmatch(file, os.path.basename(pattern)): # line 886 - matches.append(os.path.basename(pattern)) # line 886 - matches.sort(key=lambda element: len(element)) # sort in-place # line 887 - printo("%s %s%s" % ("IGN" if ignore is not None else ("TRK" if len(matches) > 0 else DOT), file, " (%s)" % ignore if ignore is not None else (" (%s)" % ("; ".join(matches)) if len(matches) > 0 else ""))) # line 888 - -def log(options: '_coconut.typing.Sequence[str]'=[]): # line 890 - ''' List previous commits on current branch. 
''' # line 891 - changes_ = "--changes" in options # type: bool # line 892 - diff_ = "--diff" in options # type: bool # line 893 - number_ = tryOrDefault(lambda _=None: int(sys.argv[sys.argv.index("-n") + 1]), None) # type: _coconut.typing.Optional[int] # line 894 - m = Metadata() # type: Metadata # line 895 - m.loadBranch(m.branch) # knows commit history # line 896 - maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # one commit guaranteed for first offline branch, for fast-branched branches a revision in branchinfo # line 897 - info((lambda _coconut_none_coalesce_item: "r%02d" % m.branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(usage.MARKER + "Offline commit history of branch '%s'" % m.branches[m.branch].name)) # TODO also retain info of "from branch/revision" on branching? # line 898 - nl = len("%d" % maxi) # type: int # determine space needed for revision # line 899 - changesetIterator = m.computeSequentialPathSetIterator(m.branch, maxi) # type: _coconut.typing.Optional[Iterator[Dict[str, PathInfo]]] # line 900 - olds = _coconut.frozenset() # type: FrozenSet[str] # last revision's entries # line 901 - last = {} # type: Dict[str, PathInfo] # path infos from previous revision # line 902 - commit = None # type: CommitInfo # line 903 - n = Metadata() # type: Metadata # used for reading parent branch information # line 904 - for no in range(maxi + 1): # line 905 - if no in m.commits: # line 906 - commit = m.commits[no] # line 906 - else: # line 907 - if n.branch != n.getParentBranch(m.branch, no): # line 908 - n.loadBranch(n.getParentBranch(m.branch, no)) # line 908 - commit = n.commits[no] # line 909 - nxts = next(changesetIterator) # type: Dict[str, PathInfo] # line 910 - news = frozenset(nxts.keys()) # type: FrozenSet[str] # line 911 - if "--all" in options or no >= max(0, maxi + 1 - ((lambda _coconut_none_coalesce_item: m.c.logLines if _coconut_none_coalesce_item is None else 
_coconut_none_coalesce_item)(number_))): # line 912 - _add = news - olds # type: FrozenSet[str] # line 913 - _del = olds - news # type: FrozenSet[str] # line 914 + m.computeSequentialPathSet(branch, revision) # load all commits up to specified revision for branch to integrate # line 768 + trackingUnion = trackingPatterns | m.getTrackingPatterns(branch) # type: FrozenSet[str] # line 769 + untrackingUnion = untrackingPatterns | m.getTrackingPatterns(branch, negative=True) # type: FrozenSet[str] # line 770 + changed, _msg = m.findChanges(checkContent=strict, inverse=True, considerOnly=onlys if not (m.track or m.picky) else pure.conditionalIntersection(onlys, trackingUnion), dontConsider=excps if not (m.track or m.picky) else (untrackingUnion if onlys is None else onlys), progress='--progress' in options) # determine difference of other branch vs. file tree. "addition" means exists now but not in other, and should be removed unless in tracking mode # line 771 + if mrg != MergeOperation.ASK and not changed.modifications and not (mrg.value & MergeOperation.INSERT.value and changed.additions or (mrg.value & MergeOperation.REMOVE.value and changed.deletions)): # no file ops, TODO ASK handling is clumsy here # line 776 + if trackingUnion != trackingPatterns: # nothing added # line 777 + info("No file changes detected, but tracking patterns were merged (run 'sos switch /-1 --meta' to undo)") # TODO write test to see if this works # line 778 + else: # line 779 + info("Nothing to update") # but write back updated branch info below # line 780 + else: # integration required # line 781 + add_all = None # type: _coconut.typing.Optional[str] # line 782 + del_all = None # type: _coconut.typing.Optional[str] # line 782 + selection = None # type: str # line 782 + if changed.deletions.items(): # line 783 + printo("Additions:") # line 783 + for path, pinfo in changed.deletions.items(): # file-based update. Deletions mark files not present in current file tree -> needs addition! 
# line 784 + selection = "y" if mrg.value & MergeOperation.INSERT.value else "n" # default for non-ask case # line 785 + if add_all is None and mrg == MergeOperation.ASK: # line 786 + selection = user_input(" Restore %r? *[Y]es, [N]o, yes to [A]ll, n[O] to all: " % path, "ynao", "y") # line 787 + if selection in "ao": # line 788 + add_all = "y" if selection == "a" else "n" # line 788 + selection = add_all # line 788 + if "y" in (add_all, selection): # deleted in current file tree: restore from branch to reach target # line 789 + m.restoreFile(path, branch, revision, pinfo, ensurePath=True) # deleted in current file tree: restore from branch to reach target # line 789 + printo(("ADD " if "y" in (add_all, selection) else "(A) ") + path) # TODO document (A) as "selected not to add by user choice" # line 790 + if changed.additions.items(): # line 791 + printo("Deletions:") # line 791 + for path, pinfo in changed.additions.items(): # line 792 + if m.track or m.picky: # because untracked files of other branch cannot be detected (which is good) # line 793 + Exit("This should never happen. Please create an issue report on Github") # because untracked files of other branch cannot be detected (which is good) # line 793 + selection = "y" if mrg.value & MergeOperation.REMOVE.value else "n" # line 794 + if del_all is None and mrg == MergeOperation.ASK: # line 795 + selection = user_input(" Delete %r? 
*[Y]es, [N]o, yes to [A]ll, n[O] to all: " % path, "ynao", "y") # line 796 + if selection in "ao": # line 797 + del_all = "y" if selection == "a" else "n" # line 797 + selection = del_all # line 797 + if "y" in (del_all, selection): # line 798 + os.unlink(encode(m.root + os.sep + path.replace(SLASH, os.sep))) # line 798 + printo(("DEL " if "y" in (del_all, selection) else "(D) ") + path) # not contained in other branch, but maybe kept # line 799 + if changed.modifications.items(): # line 800 + printo("Modifications:") # line 800 + for path, pinfo in changed.modifications.items(): # line 801 + into = os.path.normpath(os.path.join(m.root, path.replace(SLASH, os.sep))) # type: str # line 802 + binary = not m.isTextType(path) # type: bool # line 803 + op = "m" # type: str # merge as default for text files, always asks for binary (TODO unless --theirs or --mine) # line 804 + if mrg == MergeOperation.ASK or binary: # TODO this may ask user even if no interaction was asked for # line 805 + printo(("MOD " if not binary else "BIN ") + path) # TODO print mtime, size differences? 
# line 806 + op = user_input(" Resolve %r: *M[I]ne (skip), [T]heirs" % into + (": " if binary else ", [M]erge: "), "it" if binary else "itm", "i") # line 807 + if op == "t": # line 808 + printo("THR " + path) # blockwise copy of contents # line 809 + m.readOrCopyVersionedFile(branch, revision, pinfo.nameHash, toFile=into) # blockwise copy of contents # line 809 + elif op == "m": # line 810 + with open(encode(into), "rb") as fd: # TODO slurps current file # line 811 + current = fd.read() # type: bytes # TODO slurps current file # line 811 + file = m.readOrCopyVersionedFile(branch, revision, pinfo.nameHash) if pinfo.size > 0 else b'' # type: _coconut.typing.Optional[bytes] # parse lines # line 812 + if current == file and verbose: # line 813 + info("No difference to versioned file") # line 813 + elif file is not None: # if None, error message was already logged # line 814 + merged = None # type: bytes # line 815 + nl = None # type: bytes # line 815 + merged, nl = merge(file=file, into=current, mergeOperation=mrgline, charMergeOperation=mrgchar, eol=eol) # line 816 + if merged != current: # line 817 + with open(encode(path), "wb") as fd: # TODO write to temp file first, in case writing fails # line 818 + fd.write(merged) # TODO write to temp file first, in case writing fails # line 818 + elif verbose: # TODO but update timestamp? # line 819 + info("No change") # TODO but update timestamp? # line 819 + else: # mine or wrong input # line 820 + printo("MNE " + path) # nothing to do! 
same as skip # line 821 + info(usage.MARKER + "Integrated changes from '%s/r%02d' into file tree" % ((lambda _coconut_none_coalesce_item: "b%d" % branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision)) # line 822 + m.branches[currentBranch] = dataCopy(BranchInfo, m.branches[currentBranch], inSync=False, tracked=list(trackingUnion)) # line 823 + m.branch = currentBranch # need to restore setting before saving TODO operate on different objects instead # line 824 + m.saveBranches() # line 825 + +def destroy(argument: 'str', options: '_coconut.typing.Sequence[str]'=[]): # line 827 + ''' Remove a branch entirely. ''' # line 828 + m, branch, revision, changed, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(None, options) # line 829 + if len(m.branches) == 1: # line 830 + Exit("Cannot remove the only remaining branch. Use 'sos online' to leave offline mode") # line 830 + branch, revision = m.parseRevisionString(argument) # not from exitOnChanges, because we have to set argument to None there # line 831 + if branch is None or branch not in m.branches: # line 832 + Exit("Cannot delete unknown branch %r" % branch) # line 832 + if verbose: # line 833 + info(usage.MARKER + "Removing branch b%d%s..." % (branch, " '%s'" % ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name)))) # line 833 + binfo = m.removeBranch(branch) # need to keep a reference to removed entry for output below # line 834 + info(usage.MARKER + "Branch b%d%s removed" % (branch, " '%s'" % ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(binfo.name)))) # line 835 + +def add(relPath: 'str', pattern: 'str', options: '_coconut.typing.Sequence[str]'=[], negative: 'bool'=False): # line 837 + ''' Add a tracked files pattern to current branch's tracked files. 
negative means tracking blacklisting. ''' # line 838 + force = '--force' in options # type: bool # line 839 + m = Metadata() # type: Metadata # line 840 + if not (m.track or m.picky): # line 841 + Exit("Repository is in simple mode. Create offline repositories via 'sos offline --track' or 'sos offline --picky' or configure a user-wide default via 'sos config track on'") # line 841 + patterns = m.branches[m.branch].untracked if negative else m.branches[m.branch].tracked # type: List[str] # line 842 + if pattern in patterns: # line 843 + Exit("Pattern '%s' already tracked" % pattern) # line 843 + if not force and not os.path.exists(encode(relPath.replace(SLASH, os.sep))): # line 844 + Exit("The pattern folder doesn't exist. Use --force to add the file pattern anyway") # line 844 + if not force and len(fnmatch.filter(os.listdir(os.path.abspath(relPath.replace(SLASH, os.sep))), os.path.basename(pattern.replace(SLASH, os.sep)))) == 0: # doesn't match any current file # line 845 + Exit("Pattern doesn't match any file in specified folder. Use --force to add it anyway") # line 846 + patterns.append(pattern) # line 847 + m.saveBranches() # line 848 + info(usage.MARKER + "Added tracking pattern '%s' for folder '%s'" % (os.path.basename(pattern.replace(SLASH, os.sep)), os.path.abspath(relPath))) # line 849 + +def remove(relPath: 'str', pattern: 'str', negative: 'bool'=False): # line 851 + ''' Remove a tracked files pattern from current branch's tracked files. ''' # line 852 + m = Metadata() # type: Metadata # line 853 + if not (m.track or m.picky): # line 854 + Exit("Repository is in simple mode. 
Needs 'offline --track' or 'offline --picky' instead") # line 854 + patterns = m.branches[m.branch].untracked if negative else m.branches[m.branch].tracked # type: List[str] # line 855 + if pattern not in patterns: # line 856 + suggestion = _coconut.set() # type: Set[str] # line 857 + for pat in patterns: # line 858 + if fnmatch.fnmatch(pattern, pat): # line 858 + suggestion.add(pat) # line 858 + if suggestion: # TODO use same wording as in move # line 859 + printo("Do you mean any of the following tracked file patterns? '%s'" % (", ".join(sorted(suggestion)))) # TODO use same wording as in move # line 859 + Exit("Tracked pattern '%s' not found" % pattern) # line 860 + patterns.remove(pattern) # line 861 + m.saveBranches() # line 862 + info(usage.MARKER + "Removed tracking pattern '%s' for folder '%s'" % (os.path.basename(pattern), os.path.abspath(relPath.replace(SLASH, os.sep)))) # line 863 + +def ls(folder: '_coconut.typing.Optional[str]'=None, options: '_coconut.typing.Sequence[str]'=[]): # line 865 + ''' List specified directory, augmenting with repository metadata. ''' # line 866 + m = Metadata() # type: Metadata # line 867 + folder = (os.getcwd() if folder is None else folder) # line 868 + if '--all' in options: # always start at SOS repo root with --all # line 869 + folder = m.root # always start at SOS repo root with --all # line 869 + recursive = '--recursive' in options or '-r' in options or '--all' in options # type: bool # line 870 + patterns = '--patterns' in options or '-p' in options # type: bool # line 871 + DOT = (DOT_SYMBOL if m.c.useUnicodeFont else " ") * 3 # type: str # TODO or "."? 
# line 872 + if verbose: # line 873 + info(usage.MARKER + "Repository is in %s mode" % ("tracking" if m.track else ("picky" if m.picky else "simple"))) # line 873 + relPath = relativize(m.root, os.path.join(folder, "-"))[0] # type: str # line 874 + if relPath.startswith(os.pardir): # line 875 + Exit("Cannot list contents of folder outside offline repository") # line 875 + trackingPatterns = m.getTrackingPatterns() if m.track or m.picky else _coconut.frozenset() # type: _coconut.typing.Optional[FrozenSet[str]] # for current branch # line 876 + untrackingPatterns = m.getTrackingPatterns(negative=True) if m.track or m.picky else _coconut.frozenset() # type: _coconut.typing.Optional[FrozenSet[str]] # for current branch # line 877 + if '--tags' in options: # TODO this has nothing to do with "ls" - it's an entirely different command. Move if something like "sos tag" has been implemented # line 878 + if len(m.tags) > 0: # line 879 + printo(ajoin("TAG ", sorted(m.tags), nl="\n")) # line 879 + return # line 880 + for dirpath, dirnames, _filenames in os.walk(folder): # line 881 + if not recursive: # avoid recursion # line 882 + dirnames.clear() # avoid recursion # line 882 + dirnames[:] = sorted([decode(d) for d in dirnames]) # line 883 + dirnames[:] = [d for d in dirnames if len([n for n in m.c.ignoreDirs if fnmatch.fnmatch(d, n)]) == 0 or len([p for p in m.c.ignoreDirsWhitelist if fnmatch.fnmatch(d, p)]) > 0] # global ignores # line 884 + + folder = decode(dirpath) # line 886 + relPath = relativize(m.root, os.path.join(folder, "-"))[0] # line 887 + if patterns: # line 888 + out = ajoin("TRK ", [os.path.basename(p) for p in trackingPatterns if os.path.dirname(p).replace(os.sep, SLASH) == relPath], nl="\n") # type: str # line 889 + if out: # line 890 + printo("DIR %s\n" % relPath + out) # line 890 + continue # with next folder # line 891 + files = list(sorted((entry for entry in os.listdir(folder) if os.path.isfile(os.path.join(folder, entry))))) # type: List[str] # line 892 
+ if len(files) > 0: # line 893 + printo("DIR %s" % relPath) # line 893 + for file in files: # for each file list all tracking patterns that match, or none (e.g. in picky mode after commit) # line 894 + ignore = None # type: _coconut.typing.Optional[str] # line 895 + for ig in m.c.ignores: # remember first match # line 896 + if fnmatch.fnmatch(file, ig): # remember first match # line 896 + ignore = ig # remember first match # line 896 + break # remember first match # line 896 + if ignore: # found a white list entry for ignored file, undo ignoring it # line 897 + for wl in m.c.ignoresWhitelist: # found a white list entry for ignored file, undo ignoring it # line 897 + if fnmatch.fnmatch(file, wl): # found a white list entry for ignored file, undo ignoring it # line 897 + ignore = None # found a white list entry for ignored file, undo ignoring it # line 897 + break # found a white list entry for ignored file, undo ignoring it # line 897 + matches = [] # type: List[str] # line 898 + if not ignore: # line 899 + for pattern in (p for p in trackingPatterns if os.path.dirname(p).replace(os.sep, SLASH) == relPath): # only patterns matching current folder # line 900 + if fnmatch.fnmatch(file, os.path.basename(pattern)): # line 901 + matches.append(os.path.basename(pattern)) # line 901 + matches.sort(key=lambda element: len(element)) # sort in-place # line 902 + printo("%s %s%s" % ("IGN" if ignore is not None else ("TRK" if len(matches) > 0 else DOT), file, " (%s)" % ignore if ignore is not None else (" (%s)" % ("; ".join(matches)) if len(matches) > 0 else ""))) # line 903 + +def log(options: '_coconut.typing.Sequence[str]'=[]): # line 905 + ''' List previous commits on current branch. 
''' # line 906 + changes_ = "--changes" in options # type: bool # line 907 + diff_ = "--diff" in options # type: bool # line 908 + number_ = tryOrDefault(lambda _=None: int(sys.argv[sys.argv.index("-n") + 1]), None) # type: _coconut.typing.Optional[int] # line 909 + m = Metadata() # type: Metadata # line 910 + m.loadBranch(m.branch) # knows commit history # line 911 + maxi = max(m.commits) if m.commits else m.branches[m.branch].revision # type: int # one commit guaranteed for first offline branch, for fast-branched branches a revision in branchinfo # line 912 + info((lambda _coconut_none_coalesce_item: "r%02d" % m.branch if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(usage.MARKER + "Offline commit history of branch '%s'" % m.branches[m.branch].name)) # TODO also retain info of "from branch/revision" on branching? # line 913 + nl = len("%d" % maxi) # type: int # determine space needed for revision # line 914 + changesetIterator = m.computeSequentialPathSetIterator(m.branch, maxi) # type: _coconut.typing.Optional[Iterator[Dict[str, PathInfo]]] # line 915 + olds = _coconut.frozenset() # type: FrozenSet[str] # last revision's entries # line 916 + last = {} # type: Dict[str, PathInfo] # path infos from previous revision # line 917 + commit = None # type: CommitInfo # line 918 + n = Metadata() # type: Metadata # used for reading parent branch information # line 919 + for no in range(maxi + 1): # line 920 + if no in m.commits: # line 921 + commit = m.commits[no] # line 921 + else: # line 922 + if n.branch != n.getParentBranch(m.branch, no): # line 923 + n.loadBranch(n.getParentBranch(m.branch, no)) # line 923 + commit = n.commits[no] # line 924 + nxts = next(changesetIterator) # type: Dict[str, PathInfo] # line 925 + news = frozenset(nxts.keys()) # type: FrozenSet[str] # line 926 + if "--all" in options or no >= max(0, maxi + 1 - ((lambda _coconut_none_coalesce_item: m.c.logLines if _coconut_none_coalesce_item is None else 
_coconut_none_coalesce_item)(number_))): # line 927 + _add = news - olds # type: FrozenSet[str] # line 928 + _del = olds - news # type: FrozenSet[str] # line 929 # _mod_:Dict[str,PathInfo] = {k: nxts[k] for k in news - _add - _del} - _mod = frozenset([_ for _, info in {k: nxts[k] for k in news - _add - _del}.items() if last[_].size != info.size or (last[_].hash != info.hash if m.strict else last[_].mtime != info.mtime)]) # type: FrozenSet[str] # line 916 + _mod = frozenset([_ for _, info in {k: nxts[k] for k in news - _add - _del}.items() if last[_].size != info.size or (last[_].hash != info.hash if m.strict else last[_].mtime != info.mtime)]) # type: FrozenSet[str] # line 931 # _mov:FrozenSet[str] = detectMoves(ChangeSet(nxts, {o: None for o in olds}) # TODO determine moves - can we reuse detectMoves(changes)? - _txt = len([m_ for m_ in _mod if m.isTextType(m_)]) # type: int # line 918 - printo(" %s r%s @%s (+%02d/-%02d/%s%02d/T%02d) |%s|%s" % ("*" if commit.number == maxi else " ", ("%%%ds" % nl) % commit.number, strftime(commit.ctime), len(_add), len(_del), PLUSMINUS_SYMBOL if m.c.useUnicodeFont else "~", len(_mod), _txt, ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(commit.message)), "TAG" if ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(commit.message)) in m.tags else "")) # line 919 - if changes_: # TODO moves detection? # line 920 - (m.listChanges)(ChangeSet({a: None for a in _add}, {d: None for d in _del}, {m: None for m in _mod}, {})) # TODO moves detection? 
# line 920 - if diff_: # _diff(m, changes) # needs from revision diff # line 921 - pass # _diff(m, changes) # needs from revision diff # line 921 - olds = news # replaces olds for next revision compare # line 922 - last = {k: v for k, v in nxts.items()} # create new reference # line 923 - -def dump(argument: 'str', options: '_coconut.typing.Sequence[str]'=[]): # line 925 - ''' Exported entire repository as archive for easy transfer. ''' # line 926 - if verbose: # line 927 - info(usage.MARKER + "Dumping repository to archive...") # line 927 - m = Metadata() # type: Metadata # to load the configuration # line 928 - progress = '--progress' in options # type: bool # line 929 - delta = '--full' not in options # type: bool # line 930 - skipBackup = '--skip-backup' in options # type: bool # line 931 - import functools # line 932 - import locale # line 932 - import warnings # line 932 - import zipfile # line 932 - try: # HINT zlib is the library that contains the deflated algorithm # line 933 - import zlib # HINT zlib is the library that contains the deflated algorithm # line 933 - compression = zipfile.ZIP_DEFLATED # HINT zlib is the library that contains the deflated algorithm # line 933 - except: # line 934 - compression = zipfile.ZIP_STORED # line 934 - - if argument is None: # line 936 - Exit("Argument missing (target filename)") # line 936 - argument = argument if "." 
in argument else argument + DUMP_FILE # TODO this logic lacks a bit, "v1.2" would not receive the suffix # line 937 - entries = [] # type: List[str] # line 938 - if os.path.exists(encode(argument)) and not skipBackup: # line 939 - try: # line 940 - if verbose: # line 941 - info("Creating backup...") # line 941 - shutil.copy2(encode(argument), encode(argument + BACKUP_SUFFIX)) # line 942 - if delta: # list of pure relative paths without leading dot, normal slashes # line 943 - with zipfile.ZipFile(argument, "r") as _zip: # list of pure relative paths without leading dot, normal slashes # line 943 - entries = _zip.namelist() # list of pure relative paths without leading dot, normal slashes # line 943 - except Exception as E: # line 944 - Exit("Error creating backup copy before dumping. Please resolve and retry. %r" % E) # line 944 - if verbose: # line 945 - info("Dumping revisions...") # line 945 - if delta: # , UserWarning, "zipfile", 0) # don't show duplicate entries warnings # line 946 - warnings.filterwarnings('ignore', 'Duplicate name.*') # , UserWarning, "zipfile", 0) # don't show duplicate entries warnings # line 946 - with zipfile.ZipFile(argument, "a" if delta else "w", compression) as _zip: # create # line 947 - _zip.debug = 0 # suppress debugging output # line 948 - _zip.comment = ("Repository dump from %r" % strftime()).encode(UTF8) # line 949 - repopath = os.path.join(os.getcwd(), metaFolder) # type: str # line 950 - indicator = ProgressIndicator(PROGRESS_MARKER[1 if m.c.useUnicodeFont else 0]) if progress else None # type: _coconut.typing.Optional[ProgressIndicator] # line 951 - totalsize = 0 # type: int # line 952 - start_time = time.time() # type: float # line 953 - for dirpath, dirnames, filenames in os.walk(repopath): # TODO use index knowledge instead of walking to avoid adding stuff not needed? 
# line 954 - dirpath = decode(dirpath) # line 955 - if dirpath.endswith(BACKUP_SUFFIX): # don't backup backups # line 956 - continue # don't backup backups # line 956 - printo(pure.ljust(dirpath)) # TODO improve progress indicator output to | dir | dumpuing file # line 957 - dirnames[:] = sorted([decode(d) for d in dirnames], key=functools.cmp_to_key(lambda a, b: tryOrDefault(lambda: locale.strcoll("%8d" % int(a[1:]), "%8d" % int(b[1:])), locale.strcoll(a, b)))) # HINT sort for reproducible delta dumps # line 958 - filenames[:] = sorted([decode(f) for f in filenames]) # line 959 - for filename in filenames: # line 960 - abspath = os.path.join(dirpath, filename) # type: str # line 961 - relpath = os.path.join(metaFolder, os.path.relpath(abspath, repopath)).replace(os.sep, "/") # type: str # line 962 - totalsize += os.stat(encode(abspath)).st_size # line 963 - show = indicator.getIndicator() if progress else None # type: _coconut.typing.Optional[str] # line 964 - if relpath.endswith(BACKUP_SUFFIX): # don't backup backups # line 965 - continue # don't backup backups # line 965 - if not delta or relpath.endswith(metaFile) or relpath not in entries: # always update metadata, otherwise only add new revision files # line 966 - if show: # line 967 - printo("\r" + pure.ljust("Dumping %s @%.2f MiB/s %s" % (show, totalsize / (MEBI * (time.time() - start_time)), filename)), nl="") # line 967 - _zip.write(abspath, relpath) # write entry into archive # line 968 - if delta: # line 969 - _zip.comment = ("Delta dump from %r" % strftime()).encode(UTF8) # line 969 - info("\r" + pure.ljust(usage.MARKER + "Finished dumping %s repository @%.2f MiB/s." 
% ("differential" if delta else "entire", totalsize / (MEBI * (time.time() - start_time))))) # clean line # line 970 - -def publish(message: '_coconut.typing.Optional[str]', cmd: 'str', options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 972 - ''' Write changes made to the branch into one commit of the underlying VCS without further checks. ''' # line 973 - m = Metadata() # type: Metadata # line 974 - # if not (m.track or m.picky): # TODO add manual file picking mode instead (add by extension, recursive, ... see issue for details) # line 975 - # Exit("Not implemented for simple repository mode yet") # TODO add manual file picking mode instead (add by extension, recursive, ... see issue for details) # line 975 - m, branch, revision, changed, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(None, options, onlys=onlys, excps=excps) # line 976 - maxi = m.getHighestRevision(branch) # type: _coconut.typing.Optional[int] # line 977 - if maxi is None: # line 978 - Exit("No revision to publish on current branch (or any of its parents after fast-branching)") # line 978 - m.computeSequentialPathSet(branch, maxi) # load all commits up to specified revision # line 979 + _txt = len([m_ for m_ in _mod if m.isTextType(m_)]) # type: int # line 933 + printo(" %s r%s @%s (+%02d/-%02d/%s%02d/T%02d) |%s|%s" % ((ARROW_SYMBOL if m.c.useUnicodeFont else "*") if commit.number == maxi else " ", ("%%%ds" % nl) % commit.number, strftime(commit.ctime), len(_add), len(_del), PLUSMINUS_SYMBOL if m.c.useUnicodeFont else "~", len(_mod), _txt, ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(commit.message)), "TAG" if ((lambda _coconut_none_coalesce_item: "" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(commit.message)) in m.tags else "")) # line 934 + if changes_: # TODO moves 
detection? # line 935 + (m.listChanges)(ChangeSet({a: None for a in _add}, {d: None for d in _del}, {m: None for m in _mod}, {})) # TODO moves detection? # line 935 + if diff_: # _diff(m, changes) # needs from revision diff # line 936 + pass # _diff(m, changes) # needs from revision diff # line 936 + olds = news # replaces olds for next revision compare # line 937 + last = {k: v for k, v in nxts.items()} # create new reference # line 938 + +def dump(argument: 'str', options: '_coconut.typing.Sequence[str]'=[]): # line 940 + ''' Exported entire repository as archive for easy transfer. ''' # line 941 + if verbose: # line 942 + info(usage.MARKER + "Dumping repository to archive...") # line 942 + m = Metadata() # type: Metadata # to load the configuration # line 943 + progress = '--progress' in options # type: bool # line 944 + delta = '--full' not in options # type: bool # line 945 + skipBackup = '--skip-backup' in options # type: bool # line 946 + import functools # line 947 + import locale # line 947 + import warnings # line 947 + import zipfile # line 947 + try: # HINT zlib is the library that contains the deflated algorithm # line 948 + import zlib # HINT zlib is the library that contains the deflated algorithm # line 948 + compression = zipfile.ZIP_DEFLATED # HINT zlib is the library that contains the deflated algorithm # line 948 + except: # line 949 + compression = zipfile.ZIP_STORED # line 949 + + if argument is None: # line 951 + Exit("Argument missing (target filename)") # line 951 + argument = argument if "." 
in argument else argument + DUMP_FILE # TODO this logic lacks a bit, "v1.2" would not receive the suffix # line 952 + entries = [] # type: List[str] # line 953 + if os.path.exists(encode(argument)) and not skipBackup: # line 954 + try: # line 955 + if verbose: # line 956 + info("Creating backup...") # line 956 + shutil.copy2(encode(argument), encode(argument + BACKUP_SUFFIX)) # line 957 + if delta: # list of pure relative paths without leading dot, normal slashes # line 958 + with zipfile.ZipFile(argument, "r") as _zip: # list of pure relative paths without leading dot, normal slashes # line 958 + entries = _zip.namelist() # list of pure relative paths without leading dot, normal slashes # line 958 + except Exception as E: # line 959 + Exit("Error creating backup copy before dumping. Please resolve and retry. %r" % E) # line 959 + if verbose: # line 960 + info("Dumping revisions...") # line 960 + if delta: # , UserWarning, "zipfile", 0) # don't show duplicate entries warnings # line 961 + warnings.filterwarnings('ignore', 'Duplicate name.*') # , UserWarning, "zipfile", 0) # don't show duplicate entries warnings # line 961 + with zipfile.ZipFile(argument, "a" if delta else "w", compression) as _zip: # create # line 962 + _zip.debug = 0 # suppress debugging output # line 963 + _zip.comment = ("Repository dump from %r" % strftime()).encode(UTF8) # line 964 + repopath = os.path.join(os.getcwd(), metaFolder) # type: str # line 965 + indicator = ProgressIndicator(PROGRESS_MARKER[1 if m.c.useUnicodeFont else 0]) if progress else None # type: _coconut.typing.Optional[ProgressIndicator] # line 966 + totalsize = 0 # type: int # line 967 + start_time = time.time() # type: float # line 968 + for dirpath, dirnames, filenames in os.walk(repopath): # TODO use index knowledge instead of walking to avoid adding stuff not needed? 
# line 969 + dirpath = decode(dirpath) # line 970 + if dirpath.endswith(BACKUP_SUFFIX): # don't backup backups # line 971 + continue # don't backup backups # line 971 + printo(pure.ljust(dirpath)) # TODO improve progress indicator output to | dir | dumpuing file # line 972 + dirnames[:] = sorted([decode(d) for d in dirnames], key=functools.cmp_to_key(lambda a, b: tryOrDefault(lambda: locale.strcoll("%8d" % int(a[1:]), "%8d" % int(b[1:])), locale.strcoll(a, b)))) # HINT sort for reproducible delta dumps # line 973 + filenames[:] = sorted([decode(f) for f in filenames]) # line 974 + for filename in filenames: # line 975 + abspath = os.path.join(dirpath, filename) # type: str # line 976 + relpath = os.path.join(metaFolder, os.path.relpath(abspath, repopath)).replace(os.sep, "/") # type: str # line 977 + totalsize += os.stat(encode(abspath)).st_size # line 978 + show = indicator.getIndicator() if progress else None # type: _coconut.typing.Optional[str] # line 979 + if relpath.endswith(BACKUP_SUFFIX): # don't backup backups # line 980 + continue # don't backup backups # line 980 + if not delta or relpath.endswith(metaFile) or relpath not in entries: # always update metadata, otherwise only add new revision files # line 981 + if show: # line 982 + printo("\r" + pure.ljust("Dumping %s @%.2f MiB/s %s" % (show, totalsize / (MEBI * (time.time() - start_time)), filename)), nl="") # line 982 + _zip.write(abspath, relpath) # write entry into archive # line 983 + if delta: # line 984 + _zip.comment = ("Delta dump from %r" % strftime()).encode(UTF8) # line 984 + info("\r" + pure.ljust(usage.MARKER + "Finished dumping %s repository @%.2f MiB/s." 
% ("differential" if delta else "entire", totalsize / (MEBI * (time.time() - start_time))))) # clean line # line 985 + +def publish(message: '_coconut.typing.Optional[str]', cmd: 'str', options: '_coconut.typing.Sequence[str]'=[], onlys: '_coconut.typing.Optional[FrozenSet[str]]'=None, excps: '_coconut.typing.Optional[FrozenSet[str]]'=None): # line 987 + ''' Write changes made to the branch into one commit of the underlying VCS without further checks. ''' # line 988 + m = Metadata() # type: Metadata # TODO SOS only commit whats different from VCS state? # line 989 + if not (m.track or m.picky): # TODO add manual file picking mode instead (add by extension, recursive, ... see issue for details) # line 990 + Exit("Not implemented for simple repository mode yet") # TODO add manual file picking mode instead (add by extension, recursive, ... see issue for details) # line 990 + m, branch, revision, changed, strict, force, trackingPatterns, untrackingPatterns = exitOnChanges(None, options, onlys=onlys, excps=excps) # line 991 + maxi = m.getHighestRevision(branch) # type: _coconut.typing.Optional[int] # line 992 + if maxi is None: # line 993 + Exit("No revision to publish on current branch (or any of its parents after fast-branching)") # line 993 + m.computeSequentialPathSet(branch, maxi, startwith=1 if maxi >= 1 and not '--all' in options and not (m.track or m.picky) else 0) # load all commits up to specified revision # line 994 # HINT logic to only add changed files vs. originating file state - would require in-depth underlying VCS knowledge. We currentöy assume commit 0 as base # TODO discuss: only commit changes from r1.. onward vs. r0?, or attempt to add everything in repo, even if unchanged? the problem is that for different branches we might need to switch also underlying branches - import subprocess # only required in this section # line 982 + import subprocess # only required in this section # line 997 # HINT stash/rollback for Git? 
or implement a global mechanism to revert? - command = fitStrings(list(m.paths.keys()), prefix="%s add" % cmd, process=lambda _=None: '"%s"' % _.replace("\"", "\\\"")) # type: str # considering maximum command-line length, filename quoting, and spaces # line 984 -# returncode:int = subprocess.Popen(command, shell = False).wait() - returncode = 0 # type: int #, shell = False) # TODO # line 986 - printo(command) #, shell = False) # TODO # line 986 - if returncode != 0: # line 987 - Exit("Error adding files from SOS revision to underlying VCS. Leaving %s in potentially inconsistent state" % vcsNames[cmd]) # line 987 - tracked = None # type: bool # line 988 - commitArgs = None # type: _coconut.typing.Optional[str] # line 988 - tracked, commitArgs = vcsCommits[cmd] # line 988 -#returncode = subprocess.Popen(('%s commit -m "%s" %s' % (cmd, message ?? "Committed from SOS %s/r%02d on %s" % (m.branches[branch].name ?? ("b%d" % m.branch), revision, strftime()).replace("\"", "\\\""), commitArgs ?? ""))) # TODO quote-escaping on Windows - printo(('%s commit -m "%s" %s' % (cmd, (("Committed from SOS %s/r%02d on %s" % ((lambda _coconut_none_coalesce_item: ("b%d" % m.branch) if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision, strftime())).replace("\"", "\\\"") if message is None else message), ("" if commitArgs is None else commitArgs)))) # line 990 - if returncode != 0: # line 991 - Exit("Error committing files from SOS revision to underlying VCS. 
Please check current %s state" % cmd) # line 991 - if tracked: # line 992 - printo("Please note that all the files added in this commit will continue to be tracked by the underlying VCS") # line 992 - -def config(arguments: 'List[str]', options: 'List[str]'=[]): # line 994 - command = None # type: str # line 995 - key = None # type: str # line 995 - value = None # type: str # line 995 - v = None # type: str # line 995 - command, key, value = (arguments + [None] * 2)[:3] # line 996 - if command is None: # line 997 - usage.usage("help", verbose=True) # line 997 - if command not in ["set", "unset", "show", "list", "add", "rm"]: # line 998 - Exit("Unknown config command") # line 998 - local = "--local" in options # type: bool # line 999 - m = Metadata() # type: Metadata # loads layered configuration as well. TODO warning if repo not exists # line 1000 - c = m.c if local else m.c.__defaults # type: configr.Configr # line 1001 - if command == "set": # line 1002 - if None in (key, value): # line 1003 - Exit("Key or value not specified") # line 1003 - if key not in (([] if local else CONFIGURABLE_FLAGS + ["defaultbranch"]) + CONFIGURABLE_LISTS + CONFIGURABLE_INTS): # TODO move defaultbranch to configurable_texts? # line 1004 - Exit("Unsupported key for %s configuration %r" % ("local " if local else "global", key)) # TODO move defaultbranch to configurable_texts? # line 1004 - if key in CONFIGURABLE_FLAGS and value.lower() not in TRUTH_VALUES + FALSE_VALUES: # line 1005 - Exit("Cannot set flag to '%s'. Try on/off instead" % value.lower()) # line 1005 - c[key] = value.lower() in TRUTH_VALUES if key in CONFIGURABLE_FLAGS else (tryOrIgnore(lambda _=None: int(value), lambda E: error("Not an integer value: %r" % E)) if key in CONFIGURABLE_INTS else (removePath(key, value.strip()) if key not in CONFIGURABLE_LISTS else [removePath(key, v) for v in safeSplit(value, ";")])) # TODO sanitize texts? 
# line 1006 - elif command == "unset": # line 1007 - if key is None: # line 1008 - Exit("No key specified") # line 1008 - if key not in c.keys(): # HINT: Works on local configurations when used with --local # line 1009 - Exit("Unknown key") # HINT: Works on local configurations when used with --local # line 1009 - del c[key] # line 1010 - elif command == "add": # line 1011 - if None in (key, value): # line 1012 - Exit("Key or value not specified") # line 1012 - if key not in CONFIGURABLE_LISTS: # line 1013 - Exit("Unsupported key %r" % key) # line 1013 - if key not in c.keys(): # prepare empty list, or copy from global, add new value below # line 1014 - c[key] = [_ for _ in c.__defaults[key]] if local else [] # prepare empty list, or copy from global, add new value below # line 1014 - elif value in c[key]: # line 1015 - Exit("Value already contained, nothing to do") # line 1015 - if ";" in value: # line 1016 - c[key].append(removePath(key, value)) # line 1016 - else: # line 1017 - c[key].extend([removePath(key, v) for v in value.split(";")]) # line 1017 - elif command == "rm": # line 1018 - if None in (key, value): # line 1019 - Exit("Key or value not specified") # line 1019 - if key not in c.keys(): # line 1020 - Exit("Unknown key %r" % key) # line 1020 - if value not in c[key]: # line 1021 - Exit("Unknown value %r" % value) # line 1021 - c[key].remove(value) # line 1022 - if local and len(c[key]) == 0 and "--prune" in options: # remove local entry, to fallback to global # line 1023 - del c[key] # remove local entry, to fallback to global # line 1023 - else: # Show or list # line 1024 - if key == "ints": # list valid configuration items # line 1025 - printo(", ".join(CONFIGURABLE_INTS)) # list valid configuration items # line 1025 - elif key == "flags": # line 1026 - printo(", ".join(CONFIGURABLE_FLAGS)) # line 1026 - elif key == "lists": # line 1027 - printo(", ".join(CONFIGURABLE_LISTS)) # line 1027 - elif key == "texts": # line 1028 - printo(", ".join([_ for _ 
in defaults.keys() if _ not in (CONFIGURABLE_FLAGS + CONFIGURABLE_LISTS)])) # line 1028 - else: # line 1029 - out = {3: "[default]", 2: "[global] ", 1: "[local] "} # type: Dict[int, str] # in contrast to Git, we don't need (nor want) to support a "system" config scope # line 1030 - c = m.c # always use full configuration chain # line 1031 - try: # attempt single key # line 1032 - assert key is not None # force exception # line 1033 - c[key] # force exception # line 1033 - l = key in c.keys() # type: bool # line 1034 - g = key in c.__defaults.keys() # type: bool # line 1034 - printo("%s %s %r" % (key.rjust(20), out[3] if not (l or g) else (out[1] if l else out[2]), c[key])) # line 1035 - except: # normal value listing # line 1036 - vals = {k: (repr(v), 3) for k, v in defaults.items()} # type: Dict[str, Tuple[str, int]] # line 1037 - vals.update({k: (repr(v), 2) for k, v in c.__defaults.items()}) # line 1038 - vals.update({k: (repr(v), 1) for k, v in c.__map.items()}) # line 1039 - for k, vt in sorted(vals.items()): # line 1040 - printo("%s %s %s" % (k.rjust(20), out[vt[1]], vt[0])) # line 1040 - if len(c.keys()) == 0: # line 1041 - info("No local configuration stored") # line 1041 - if len(c.__defaults.keys()) == 0: # line 1042 - info("No global configuration stored") # line 1042 - return # in case of list, no need to store anything # line 1043 - if local: # saves changes of repoConfig # line 1044 - m.repoConf = c.__map # saves changes of repoConfig # line 1044 - m.saveBranches() # saves changes of repoConfig # line 1044 - Exit("OK", code=0) # saves changes of repoConfig # line 1044 - else: # global config # line 1045 - f, h = saveConfig(c) # only saves c.__defaults (nested Configr) # line 1046 - if f is None: # line 1047 - error("Error saving user configuration: %r" % h) # line 1047 - else: # line 1048 - Exit("OK", code=0) # line 1048 - -def move(relPath: 'str', pattern: 'str', newRelPath: 'str', newPattern: 'str', options: 'List[str]'=[], negative: 'bool'=False): 
# line 1050 + files = list(m.paths.keys()) # type: _coconut.typing.Sequence[str] # line 999 + while files: # line 1000 + command = fitStrings(files, prefix="%s add" % cmd, process=lambda _=None: '"%s"' % _.replace("\"", "\\\"")) # type: str # considering maximum command-line length, filename quoting, and spaces # line 1001 + returncode = subprocess.Popen(command, shell=False).wait() # type: int # line 1002 +# returncode:int = 0; debug(command) + if returncode != 0: # line 1004 + Exit("Error adding files from SOS revision to underlying VCS. Leaving %s in potentially inconsistent state" % vcsNames[cmd]) # line 1004 + tracked = None # type: bool # line 1005 + commitArgs = None # type: _coconut.typing.Optional[str] # line 1005 + tracked, commitArgs = vcsCommits[cmd] # line 1005 + returncode = subprocess.Popen(('%s commit -m "%s" %s' % (cmd, (("Committed from SOS %s/r%02d on %s" % ((lambda _coconut_none_coalesce_item: ("b%d" % m.branch) if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(m.branches[branch].name), revision, strftime())).replace("\"", "\\\"") if message is None else message), ("" if commitArgs is None else commitArgs)))) # TODO quote-escaping on Windows # line 1006 +# debug(('%s commit -m "%s" %s' % (cmd, message ?? ("Committed from SOS %s/r%02d on %s" % (m.branches[branch].name ?? ("b%d" % m.branch), revision, strftime())).replace("\"", "\\\""), commitArgs ?? ""))) + if returncode != 0: # line 1008 + Exit("Error committing files from SOS revision to underlying VCS. 
Please check current %s state" % cmd) # line 1008 + if tracked: # line 1009 + warn("Please note that all the files added in this commit will continue to be tracked by the underlying VCS") # line 1009 + +def config(arguments: 'List[str]', options: 'List[str]'=[]): # line 1011 + command = None # type: str # line 1012 + key = None # type: str # line 1012 + value = None # type: str # line 1012 + v = None # type: str # line 1012 + command, key, value = (arguments + [None] * 2)[:3] # line 1013 + if command is None: # line 1014 + usage.usage("help", verbose=True) # line 1014 + if command not in ["set", "unset", "show", "list", "add", "rm"]: # line 1015 + Exit("Unknown config command") # line 1015 + local = "--local" in options # type: bool # line 1016 + m = Metadata() # type: Metadata # loads layered configuration as well. TODO warning if repo not exists # line 1017 + c = m.c if local else m.c.__defaults # type: configr.Configr # line 1018 + if command == "set": # line 1019 + if None in (key, value): # line 1020 + Exit("Key or value not specified") # line 1020 + if key not in (([] if local else CONFIGURABLE_FLAGS + ["defaultbranch"]) + CONFIGURABLE_LISTS + CONFIGURABLE_INTS): # TODO move defaultbranch to configurable_texts? # line 1021 + Exit("Unsupported key for %s configuration %r" % ("local " if local else "global", key)) # TODO move defaultbranch to configurable_texts? # line 1021 + if key in CONFIGURABLE_FLAGS and value.lower() not in TRUTH_VALUES + FALSE_VALUES: # line 1022 + Exit("Cannot set flag to '%s'. Try on/off instead" % value.lower()) # line 1022 + c[key] = value.lower() in TRUTH_VALUES if key in CONFIGURABLE_FLAGS else (tryOrIgnore(lambda _=None: int(value), lambda E: error("Not an integer value: %r" % E)) if key in CONFIGURABLE_INTS else (removePath(key, value.strip()) if key not in CONFIGURABLE_LISTS else [removePath(key, v) for v in safeSplit(value, ";")])) # TODO sanitize texts? 
# line 1023 + elif command == "unset": # line 1024 + if key is None: # line 1025 + Exit("No key specified") # line 1025 + if key not in c.keys(): # HINT: Works on local configurations when used with --local # line 1026 + Exit("Unknown key") # HINT: Works on local configurations when used with --local # line 1026 + del c[key] # line 1027 + elif command == "add": # line 1028 + if None in (key, value): # line 1029 + Exit("Key or value not specified") # line 1029 + if key not in CONFIGURABLE_LISTS: # line 1030 + Exit("Unsupported key %r" % key) # line 1030 + if key not in c.keys(): # prepare empty list, or copy from global, add new value below # line 1031 + c[key] = [_ for _ in c.__defaults[key]] if local else [] # prepare empty list, or copy from global, add new value below # line 1031 + elif value in c[key]: # line 1032 + Exit("Value already contained, nothing to do") # line 1032 + if ";" in value: # line 1033 + c[key].append(removePath(key, value)) # line 1033 + else: # line 1034 + c[key].extend([removePath(key, v) for v in value.split(";")]) # line 1034 + elif command == "rm": # line 1035 + if None in (key, value): # line 1036 + Exit("Key or value not specified") # line 1036 + if key not in c.keys(): # line 1037 + Exit("Unknown key %r" % key) # line 1037 + if value not in c[key]: # line 1038 + Exit("Unknown value %r" % value) # line 1038 + c[key].remove(value) # line 1039 + if local and len(c[key]) == 0 and "--prune" in options: # remove local entry, to fallback to global # line 1040 + del c[key] # remove local entry, to fallback to global # line 1040 + else: # Show or list # line 1041 + if key == "ints": # list valid configuration items # line 1042 + printo(", ".join(CONFIGURABLE_INTS)) # list valid configuration items # line 1042 + elif key == "flags": # line 1043 + printo(", ".join(CONFIGURABLE_FLAGS)) # line 1043 + elif key == "lists": # line 1044 + printo(", ".join(CONFIGURABLE_LISTS)) # line 1044 + elif key == "texts": # line 1045 + printo(", ".join([_ for _ 
in defaults.keys() if _ not in (CONFIGURABLE_FLAGS + CONFIGURABLE_LISTS)])) # line 1045 + else: # line 1046 + out = {3: "[default]", 2: "[global] ", 1: "[local] "} # type: Dict[int, str] # in contrast to Git, we don't need (nor want) to support a "system" config scope # line 1047 + c = m.c # always use full configuration chain # line 1048 + try: # attempt single key # line 1049 + assert key is not None # force exception # line 1050 + c[key] # force exception # line 1050 + l = key in c.keys() # type: bool # line 1051 + g = key in c.__defaults.keys() # type: bool # line 1051 + printo("%s %s %r" % (key.rjust(20), out[3] if not (l or g) else (out[1] if l else out[2]), c[key])) # line 1052 + except: # normal value listing # line 1053 + vals = {k: (repr(v), 3) for k, v in defaults.items()} # type: Dict[str, Tuple[str, int]] # line 1054 + vals.update({k: (repr(v), 2) for k, v in c.__defaults.items()}) # line 1055 + vals.update({k: (repr(v), 1) for k, v in c.__map.items()}) # line 1056 + for k, vt in sorted(vals.items()): # line 1057 + printo("%s %s %s" % (k.rjust(20), out[vt[1]], vt[0])) # line 1057 + if len(c.keys()) == 0: # line 1058 + info("No local configuration stored") # line 1058 + if len(c.__defaults.keys()) == 0: # line 1059 + info("No global configuration stored") # line 1059 + return # in case of list, no need to store anything # line 1060 + if local: # saves changes of repoConfig # line 1061 + m.repoConf = c.__map # saves changes of repoConfig # line 1061 + m.saveBranches() # saves changes of repoConfig # line 1061 + Exit("OK", code=0) # saves changes of repoConfig # line 1061 + else: # global config # line 1062 + f, h = saveConfig(c) # only saves c.__defaults (nested Configr) # line 1063 + if f is None: # line 1064 + error("Error saving user configuration: %r" % h) # line 1064 + else: # line 1065 + Exit("OK", code=0) # line 1065 + +def move(relPath: 'str', pattern: 'str', newRelPath: 'str', newPattern: 'str', options: 'List[str]'=[], negative: 'bool'=False): 
# line 1067 ''' Path differs: Move files, create folder if not existing. Pattern differs: Attempt to rename file, unless exists in target or not unique. for "mvnot" don't do any renaming (or do?) - ''' # line 1053 - if verbose: # line 1054 - info(usage.MARKER + "Renaming %r to %r" % (pattern, newPattern)) # line 1054 - force = '--force' in options # type: bool # line 1055 - soft = '--soft' in options # type: bool # line 1056 - if not os.path.exists(encode(relPath.replace(SLASH, os.sep))) and not force: # line 1057 - Exit("Source folder doesn't exist. Use --force to proceed anyway") # line 1057 - m = Metadata() # type: Metadata # line 1058 - patterns = m.branches[m.branch].untracked if negative else m.branches[m.branch].tracked # type: List[str] # line 1059 - matching = fnmatch.filter(os.listdir(relPath.replace(SLASH, os.sep)) if os.path.exists(encode(relPath.replace(SLASH, os.sep))) else [], os.path.basename(pattern)) # type: List[str] # find matching files in source # line 1060 - matching[:] = [f for f in matching if len([n for n in m.c.ignores if fnmatch.fnmatch(f, n)]) == 0 or len([p for p in m.c.ignoresWhitelist if fnmatch.fnmatch(f, p)]) > 0] # line 1061 - if not matching and not force: # line 1062 - Exit("No files match the specified file pattern. Use --force to proceed anyway") # line 1062 - if not (m.track or m.picky): # line 1063 - Exit("Repository is in simple mode. 
Simply use basic file operations to modify files, then execute 'sos commit' to version the changes") # line 1063 - if pattern not in patterns: # list potential alternatives and exit # line 1064 - for tracked in (t for t in patterns if os.path.dirname(t) == relPath): # for all patterns of the same source folder # line 1065 - alternative = fnmatch.filter(matching, os.path.basename(tracked)) # type: _coconut.typing.Sequence[str] # find if it matches any of the files in the source folder, too # line 1066 - if alternative: # line 1067 - info(" '%s' matches %d files" % (tracked, len(alternative))) # line 1067 - if not (force or soft): # line 1068 - Exit("File pattern '%s' is not tracked on current branch. 'sos move' only works on tracked patterns" % pattern) # line 1068 - basePattern = os.path.basename(pattern) # type: str # pure glob without folder # line 1069 - newBasePattern = os.path.basename(newPattern) # type: str # line 1070 - if basePattern.count("*") < newBasePattern.count("*") or (basePattern.count("?") - basePattern.count("[?]")) < (newBasePattern.count("?") - newBasePattern.count("[?]")) or (basePattern.count("[") - basePattern.count("\\[")) < (newBasePattern.count("[") - newBasePattern.count("\\[")) or (basePattern.count("]") - basePattern.count("\\]")) < (newBasePattern.count("]") - newBasePattern.count("\\]")): # line 1071 - Exit("Glob markers from '%s' to '%s' don't match, cannot move/rename tracked matching files" % (basePattern, newBasePattern)) # line 1075 - oldTokens = None # type: _coconut.typing.Sequence[GlobBlock] # line 1076 - newToken = None # type: _coconut.typing.Sequence[GlobBlock] # line 1076 - oldTokens, newTokens = tokenizeGlobPatterns(os.path.basename(pattern), os.path.basename(newPattern)) # line 1077 - matches = convertGlobFiles(matching, oldTokens, newTokens) # type: _coconut.typing.Sequence[Tuple[str, str]] # computes list of source - target filename pairs # line 1078 - if len({st[1] for st in matches}) != len(matches): # line 1079 - 
Exit("Some target filenames are not unique and different move/rename actions would point to the same target file") # line 1079 - matches = reorderRenameActions(matches, exitOnConflict=not soft) # attempts to find conflict-free renaming order, or exits # line 1080 - if os.path.exists(encode(newRelPath)): # line 1081 - exists = [filename[1] for filename in matches if os.path.exists(encode(os.path.join(newRelPath, filename[1]).replace(SLASH, os.sep)))] # type: _coconut.typing.Sequence[str] # line 1082 - if exists and not (force or soft): # line 1083 - Exit("%s files would write over existing files in %s cases. Use --force to execute it anyway" % ("Moving" if relPath != newRelPath else "Renaming", "all" if len(exists) == len(matches) else "some")) # line 1083 - else: # line 1084 - os.makedirs(encode(os.path.abspath(newRelPath.replace(SLASH, os.sep)))) # line 1084 - if not soft: # perform actual renaming # line 1085 - for (source, target) in matches: # line 1086 - try: # line 1087 - shutil.move(encode(os.path.abspath(os.path.join(relPath, source).replace(SLASH, os.sep))), encode(os.path.abspath(os.path.join(newRelPath, target).replace(SLASH, os.sep)))) # line 1087 - except Exception as E: # one error can lead to another in case of delicate renaming order # line 1088 - error("Cannot move/rename file '%s' to '%s'" % (source, os.path.join(newRelPath, target))) # one error can lead to another in case of delicate renaming order # line 1088 - patterns[patterns.index(pattern)] = newPattern # line 1089 - m.saveBranches() # line 1090 - -def parse(vcs: 'str', cwd: 'str', cmd: 'str'): # line 1092 - ''' Main operation. root is underlying VCS base dir. main() has already chdir'ed into SOS root folder, cwd is original working directory for add, rm, mv. 
''' # line 1093 - debug("Parsing command-line arguments...") # line 1094 - root = os.getcwd() # line 1095 - try: # line 1096 - onlys, excps = parseOnlyOptions(cwd, sys.argv) # extracts folder-relative paths (used in changes, commit, diff, switch, update) # line 1097 - command = sys.argv[1].strip() if len(sys.argv) > 1 else "" # line 1098 - arguments = [c.strip() for c in sys.argv[2:] if not (c.startswith("-") and (len(c) == 2 or c[1] == "-"))] # type: List[_coconut.typing.Optional[str]] # line 1099 - options = [c.strip() for c in sys.argv[2:] if c.startswith("-") and (len(c) == 2 or c[1] == "-")] # options with arguments have to be parsed from sys.argv # line 1100 - debug("Processing command %r with arguments %r and options %r." % (command, [_ for _ in arguments if _ is not None], options)) # line 1101 - if command[:1] in "amr": # line 1102 - relPath, pattern = relativize(root, os.path.join(cwd, arguments[0] if arguments else ".")) # line 1102 - if command[:1] == "m": # line 1103 - if len(arguments) < 2: # line 1104 - Exit("Need a second file pattern argument as target for move command") # line 1104 - newRelPath, newPattern = relativize(root, os.path.join(cwd, arguments[1])) # line 1105 - arguments[:] = (arguments + [None] * 3)[:3] # line 1106 - if command[:1] == "a": # addnot # line 1107 - add(relPath, pattern, options, negative="n" in command) # addnot # line 1107 - elif command[:1] == "b": # line 1108 - branch(arguments[0], arguments[1], options) # line 1108 - elif command[:3] == "com": # line 1109 - commit(arguments[0], options, onlys, excps) # line 1109 - elif command[:2] == "ch": # "changes" (legacy) # line 1110 - changes(arguments[0], options, onlys, excps) # "changes" (legacy) # line 1110 - elif command[:2] == "ci": # line 1111 - commit(arguments[0], options, onlys, excps) # line 1111 - elif command[:3] == 'con': # line 1112 - config(arguments, options) # line 1112 - elif command[:2] == "de": # line 1113 - destroy(arguments[0], options) # line 1113 - elif 
command[:2] == "di": # line 1114 - diff(arguments[0], options, onlys, excps) # line 1114 - elif command[:2] == "du": # line 1115 - dump(arguments[0], options) # line 1115 - elif command[:1] == "h": # line 1116 - usage.usage(arguments[0], verbose=verbose) # line 1116 - elif command[:2] == "lo": # line 1117 - log(options) # line 1117 - elif command[:2] == "li": # line 1118 - ls(os.path.relpath((lambda _coconut_none_coalesce_item: cwd if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(arguments[0]), root), options) # line 1118 - elif command[:2] == "ls": # line 1119 - ls(os.path.relpath((lambda _coconut_none_coalesce_item: cwd if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(arguments[0]), root), options) # line 1119 - elif command[:1] == "m": # mvnot # line 1120 - move(relPath, pattern, newRelPath, newPattern, options, negative="n" in command) # mvnot # line 1120 - elif command[:2] == "of": # line 1121 - offline(arguments[0], arguments[1], options) # line 1121 - elif command[:2] == "on": # line 1122 - online(options) # line 1122 - elif command[:1] == "p": # line 1123 - publish(arguments[0], cmd, options, onlys, excps) # line 1123 - elif command[:1] == "r": # rmnot # line 1124 - remove(relPath, pattern, negative="n" in command) # rmnot # line 1124 - elif command[:2] == "st": # line 1125 - status(arguments[0], vcs, cmd, options, onlys, excps) # line 1125 - elif command[:2] == "sw": # line 1126 - switch(arguments[0], options, onlys, excps) # line 1126 - elif command[:1] == "u": # line 1127 - update(arguments[0], options, onlys, excps) # line 1127 - elif command[:1] == "v": # line 1128 - usage.usage(arguments[0], version=True) # line 1128 - else: # line 1129 - Exit("Unknown command '%s'" % command) # line 1129 - Exit(code=0) # regular exit # line 1130 - except (Exception, RuntimeError) as E: # line 1131 - exception(E) # line 1132 - Exit("An internal error occurred in SOS. 
Please report above message to the project maintainer at https://github.com/ArneBachmann/sos/issues via 'New Issue'.\nPlease state your installed version via 'sos version', and what you were doing") # line 1133 - -def main(): # line 1135 - global debug, info, warn, error # to modify logger # line 1136 - logging.basicConfig(level=level, stream=sys.stderr, format=("%(asctime)-23s %(levelname)-8s %(name)s:%(lineno)d | %(message)s" if '--log' in sys.argv else "%(message)s")) # line 1137 - _log = Logger(logging.getLogger(__name__)) # line 1138 - debug, info, warn, error = _log.debug, _log.info, _log.warn, _log.error # line 1138 - for option in (o for o in ['--log', '--debug', '--verbose', '-v', '--sos', '--vcs'] if o in sys.argv): # clean up program arguments # line 1139 - sys.argv.remove(option) # clean up program arguments # line 1139 - if '--help' in sys.argv or len(sys.argv) < 2: # line 1140 - usage.usage(sys.argv[sys.argv.index('--help') + 1] if '--help' in sys.argv and len(sys.argv) > sys.argv.index('--help') + 1 else None, verbose=verbose) # line 1140 - command = sys.argv[1] if len(sys.argv) > 1 else None # type: _coconut.typing.Optional[str] # line 1141 - root, vcs, cmd = findSosVcsBase() # root is None if no .sos folder exists up the folder tree (still working online); vcs is checkout/repo root folder; cmd is the VCS base command # line 1142 - debug("Detected SOS root folder: %s\nDetected VCS root folder: %s" % (("-" if root is None else root), ("-" if vcs is None else vcs))) # line 1143 - defaults["defaultbranch"] = (lambda _coconut_none_coalesce_item: "default" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(vcsBranches.get(cmd, vcsBranches[SVN])) # sets dynamic default with SVN fallback # line 1144 - defaults["useChangesCommand"] = cmd == "fossil" # sets dynamic default with SVN fallback # line 1145 - if (not force_vcs or force_sos) and (root is not None or (("" if command is None else command))[:2] == "of" or (("_" if command is 
None else command))[:1] in "hv"): # in offline mode or just going offline TODO what about git config? # line 1146 - cwd = os.getcwd() # line 1147 - os.chdir(cwd if command[:2] == "of" else (cwd if root is None else root)) # line 1148 - parse(vcs, cwd, cmd) # line 1149 - elif force_vcs or cmd is not None: # online mode - delegate to VCS # line 1150 - info("%s: Running '%s %s'" % (usage.COMMAND.upper(), cmd, " ".join(sys.argv[1:]))) # line 1151 - import subprocess # only required in this section # line 1152 - process = subprocess.Popen([cmd] + sys.argv[1:], shell=False, stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr) # line 1153 - inp = "" # type: str # line 1154 - while True: # line 1155 - so, se = process.communicate(input=inp) # line 1156 - if process.returncode is not None: # line 1157 - break # line 1157 - inp = sys.stdin.read() # line 1158 - if sys.argv[1][:2] == "co" and process.returncode == 0: # successful commit - assume now in sync again (but leave meta data folder with potential other feature branches behind until "online") # line 1159 - if root is None: # line 1160 - Exit("Cannot determine VCS root folder: Unable to mark repository as synchronized and will show a warning when leaving offline mode") # line 1160 - m = Metadata(root) # type: Metadata # line 1161 - m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], inSync=True) # mark as committed # line 1162 - m.saveBranches() # line 1163 - else: # line 1164 - Exit("No offline repository present, and unable to detect VCS file tree") # line 1164 + ''' # line 1070 + if verbose: # line 1071 + info(usage.MARKER + "Renaming %r to %r" % (pattern, newPattern)) # line 1071 + force = '--force' in options # type: bool # line 1072 + soft = '--soft' in options # type: bool # line 1073 + if not os.path.exists(encode(relPath.replace(SLASH, os.sep))) and not force: # line 1074 + Exit("Source folder doesn't exist. 
Use --force to proceed anyway") # line 1074 + m = Metadata() # type: Metadata # line 1075 + patterns = m.branches[m.branch].untracked if negative else m.branches[m.branch].tracked # type: List[str] # line 1076 + matching = fnmatch.filter(os.listdir(relPath.replace(SLASH, os.sep)) if os.path.exists(encode(relPath.replace(SLASH, os.sep))) else [], os.path.basename(pattern)) # type: List[str] # find matching files in source # line 1077 + matching[:] = [f for f in matching if len([n for n in m.c.ignores if fnmatch.fnmatch(f, n)]) == 0 or len([p for p in m.c.ignoresWhitelist if fnmatch.fnmatch(f, p)]) > 0] # line 1078 + if not matching and not force: # line 1079 + Exit("No files match the specified file pattern. Use --force to proceed anyway") # line 1079 + if not (m.track or m.picky): # line 1080 + Exit("Repository is in simple mode. Simply use basic file operations to modify files, then execute 'sos commit' to version the changes") # line 1080 + if pattern not in patterns: # list potential alternatives and exit # line 1081 + for tracked in (t for t in patterns if os.path.dirname(t) == relPath): # for all patterns of the same source folder # line 1082 + alternative = fnmatch.filter(matching, os.path.basename(tracked)) # type: _coconut.typing.Sequence[str] # find if it matches any of the files in the source folder, too # line 1083 + if alternative: # line 1084 + info(" '%s' matches %d files" % (tracked, len(alternative))) # line 1084 + if not (force or soft): # line 1085 + Exit("File pattern '%s' is not tracked on current branch. 
'sos move' only works on tracked patterns" % pattern) # line 1085 + basePattern = os.path.basename(pattern) # type: str # pure glob without folder # line 1086 + newBasePattern = os.path.basename(newPattern) # type: str # line 1087 + if basePattern.count("*") < newBasePattern.count("*") or (basePattern.count("?") - basePattern.count("[?]")) < (newBasePattern.count("?") - newBasePattern.count("[?]")) or (basePattern.count("[") - basePattern.count("\\[")) < (newBasePattern.count("[") - newBasePattern.count("\\[")) or (basePattern.count("]") - basePattern.count("\\]")) < (newBasePattern.count("]") - newBasePattern.count("\\]")): # line 1088 + Exit("Glob markers from '%s' to '%s' don't match, cannot move/rename tracked matching files" % (basePattern, newBasePattern)) # line 1092 + oldTokens = None # type: _coconut.typing.Sequence[GlobBlock] # line 1093 + newToken = None # type: _coconut.typing.Sequence[GlobBlock] # line 1093 + oldTokens, newTokens = tokenizeGlobPatterns(os.path.basename(pattern), os.path.basename(newPattern)) # line 1094 + matches = convertGlobFiles(matching, oldTokens, newTokens) # type: _coconut.typing.Sequence[Tuple[str, str]] # computes list of source - target filename pairs # line 1095 + if len({st[1] for st in matches}) != len(matches): # line 1096 + Exit("Some target filenames are not unique and different move/rename actions would point to the same target file") # line 1096 + matches = reorderRenameActions(matches, exitOnConflict=not soft) # attempts to find conflict-free renaming order, or exits # line 1097 + if os.path.exists(encode(newRelPath)): # line 1098 + exists = [filename[1] for filename in matches if os.path.exists(encode(os.path.join(newRelPath, filename[1]).replace(SLASH, os.sep)))] # type: _coconut.typing.Sequence[str] # line 1099 + if exists and not (force or soft): # line 1100 + Exit("%s files would write over existing files in %s cases. 
Use --force to execute it anyway" % ("Moving" if relPath != newRelPath else "Renaming", "all" if len(exists) == len(matches) else "some")) # line 1100 + else: # line 1101 + os.makedirs(encode(os.path.abspath(newRelPath.replace(SLASH, os.sep)))) # line 1101 + if not soft: # perform actual renaming # line 1102 + for (source, target) in matches: # line 1103 + try: # line 1104 + shutil.move(encode(os.path.abspath(os.path.join(relPath, source).replace(SLASH, os.sep))), encode(os.path.abspath(os.path.join(newRelPath, target).replace(SLASH, os.sep)))) # line 1104 + except Exception as E: # one error can lead to another in case of delicate renaming order # line 1105 + error("Cannot move/rename file '%s' to '%s'" % (source, os.path.join(newRelPath, target))) # one error can lead to another in case of delicate renaming order # line 1105 + patterns[patterns.index(pattern)] = newPattern # line 1106 + m.saveBranches() # line 1107 + +def parse(vcs: 'str', cwd: 'str', cmd: 'str'): # line 1109 + ''' Main operation. root is underlying VCS base dir. main() has already chdir'ed into SOS root folder, cwd is original working directory for add, rm, mv. ''' # line 1110 + debug("Parsing command-line arguments...") # line 1111 + root = os.getcwd() # line 1112 + try: # line 1113 + onlys, excps = parseOnlyOptions(cwd, sys.argv) # extracts folder-relative paths (used in changes, commit, diff, switch, update) # line 1114 + command = sys.argv[1].strip() if len(sys.argv) > 1 else "" # line 1115 + arguments = [c.strip() for c in sys.argv[2:] if not (c.startswith("-") and (len(c) == 2 or c[1] == "-"))] # type: List[_coconut.typing.Optional[str]] # line 1116 + options = [c.strip() for c in sys.argv[2:] if c.startswith("-") and (len(c) == 2 or c[1] == "-")] # options with arguments have to be parsed from sys.argv # line 1117 + debug("Processing command %r with arguments %r and options %r." 
% (command, [_ for _ in arguments if _ is not None], options)) # line 1118 + if command[:1] in "amr": # line 1119 + relPath, pattern = relativize(root, os.path.join(cwd, arguments[0] if arguments else ".")) # line 1119 + if command[:1] == "m": # line 1120 + if len(arguments) < 2: # line 1121 + Exit("Need a second file pattern argument as target for move command") # line 1121 + newRelPath, newPattern = relativize(root, os.path.join(cwd, arguments[1])) # line 1122 + arguments[:] = (arguments + [None] * 3)[:3] # line 1123 + if command[:1] == "a": # addnot # line 1124 + add(relPath, pattern, options, negative="n" in command) # addnot # line 1124 + elif command[:1] == "b": # line 1125 + branch(arguments[0], arguments[1], options) # line 1125 + elif command[:3] == "com": # line 1126 + commit(arguments[0], options, onlys, excps) # line 1126 + elif command[:2] == "ch": # "changes" (legacy) # line 1127 + changes(arguments[0], options, onlys, excps) # "changes" (legacy) # line 1127 + elif command[:2] == "ci": # line 1128 + commit(arguments[0], options, onlys, excps) # line 1128 + elif command[:3] == 'con': # line 1129 + config(arguments, options) # line 1129 + elif command[:2] == "de": # line 1130 + destroy(arguments[0], options) # line 1130 + elif command[:2] == "di": # line 1131 + diff(arguments[0], options, onlys, excps) # line 1131 + elif command[:2] == "du": # line 1132 + dump(arguments[0], options) # line 1132 + elif command[:1] == "h": # line 1133 + usage.usage(arguments[0], verbose=verbose) # line 1133 + elif command[:2] == "lo": # line 1134 + log(options) # line 1134 + elif command[:2] == "li": # line 1135 + ls(os.path.relpath((lambda _coconut_none_coalesce_item: cwd if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(arguments[0]), root), options) # line 1135 + elif command[:2] == "ls": # line 1136 + ls(os.path.relpath((lambda _coconut_none_coalesce_item: cwd if _coconut_none_coalesce_item is None else 
_coconut_none_coalesce_item)(arguments[0]), root), options) # line 1136 + elif command[:1] == "m": # mvnot # line 1137 + move(relPath, pattern, newRelPath, newPattern, options, negative="n" in command) # mvnot # line 1137 + elif command[:2] == "of": # line 1138 + offline(arguments[0], arguments[1], options) # line 1138 + elif command[:2] == "on": # line 1139 + online(options) # line 1139 + elif command[:1] == "p": # line 1140 + publish(arguments[0], cmd, options, onlys, excps) # line 1140 + elif command[:1] == "r": # rmnot # line 1141 + remove(relPath, pattern, negative="n" in command) # rmnot # line 1141 + elif command[:2] == "st": # line 1142 + status(arguments[0], vcs, cmd, options, onlys, excps) # line 1142 + elif command[:2] == "sw": # line 1143 + switch(arguments[0], options, onlys, excps) # line 1143 + elif command[:1] == "u": # line 1144 + update(arguments[0], options, onlys, excps) # line 1144 + elif command[:1] == "v": # line 1145 + usage.usage(arguments[0], version=True) # line 1145 + else: # line 1146 + Exit("Unknown command '%s'" % command) # line 1146 + Exit(code=0) # regular exit # line 1147 + except (Exception, RuntimeError) as E: # line 1148 + exception(E) # line 1149 + Exit("An internal error occurred in SOS. 
Please report above message to the project maintainer at https://github.com/ArneBachmann/sos/issues via 'New Issue'.\nPlease state your installed version via 'sos version', and what you were doing") # line 1150 + +def main(): # line 1152 + global debug, info, warn, error # to modify logger # line 1153 + logging.basicConfig(level=level, stream=sys.stderr, format=("%(asctime)-23s %(levelname)-8s %(name)s:%(lineno)d | %(message)s" if '--log' in sys.argv else "%(message)s")) # line 1154 + _log = Logger(logging.getLogger(__name__)) # line 1155 + debug, info, warn, error = _log.debug, _log.info, _log.warn, _log.error # line 1155 + for option in (o for o in ['--log', '--debug', '--verbose', '-v', '--sos', '--vcs'] if o in sys.argv): # clean up program arguments # line 1156 + sys.argv.remove(option) # clean up program arguments # line 1156 + if '--help' in sys.argv or len(sys.argv) < 2: # line 1157 + usage.usage(sys.argv[sys.argv.index('--help') + 1] if '--help' in sys.argv and len(sys.argv) > sys.argv.index('--help') + 1 else None, verbose=verbose) # line 1157 + command = sys.argv[1] if len(sys.argv) > 1 else None # type: _coconut.typing.Optional[str] # line 1158 + root, vcs, cmd = findSosVcsBase() # root is None if no .sos folder exists up the folder tree (still working online); vcs is checkout/repo root folder; cmd is the VCS base command # line 1159 + debug("Detected SOS root folder: %s\nDetected VCS root folder: %s" % (("-" if root is None else root), ("-" if vcs is None else vcs))) # line 1160 + defaults["defaultbranch"] = (lambda _coconut_none_coalesce_item: "default" if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(vcsBranches.get(cmd, vcsBranches[SVN])) # sets dynamic default with SVN fallback # line 1161 + defaults["useChangesCommand"] = cmd == "fossil" # sets dynamic default with SVN fallback # line 1162 + if (not force_vcs or force_sos) and (root is not None or (("" if command is None else command))[:2] == "of" or (("_" if command is 
None else command))[:1] in "hv"): # in offline mode or just going offline TODO what about git config? # line 1163 + cwd = os.getcwd() # line 1164 + os.chdir(cwd if command[:2] == "of" else (cwd if root is None else root)) # line 1165 + parse(vcs, cwd, cmd) # line 1166 + elif force_vcs or cmd is not None: # online mode - delegate to VCS # line 1167 + info("%s: Running '%s %s'" % (usage.COMMAND.upper(), cmd, " ".join(sys.argv[1:]))) # line 1168 + import subprocess # only required in this section # line 1169 + process = subprocess.Popen([cmd] + sys.argv[1:], shell=False, stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr) # line 1170 + inp = "" # type: str # line 1171 + while True: # line 1172 + so, se = process.communicate(input=inp) # line 1173 + if process.returncode is not None: # line 1174 + break # line 1174 + inp = sys.stdin.read() # line 1175 + if sys.argv[1][:2] == "co" and process.returncode == 0: # successful commit - assume now in sync again (but leave meta data folder with potential other feature branches behind until "online") # line 1176 + if root is None: # line 1177 + Exit("Cannot determine VCS root folder: Unable to mark repository as synchronized and will show a warning when leaving offline mode") # line 1177 + m = Metadata(root) # type: Metadata # line 1178 + m.branches[m.branch] = dataCopy(BranchInfo, m.branches[m.branch], inSync=True) # mark as committed # line 1179 + m.saveBranches() # line 1180 + else: # line 1181 + Exit("No offline repository present, and unable to detect VCS file tree") # line 1181 # Main part -force_sos = '--sos' in sys.argv # type: bool # line 1168 -force_vcs = '--vcs' in sys.argv # type: bool # line 1169 -verbose = '--verbose' in sys.argv or '-v' in sys.argv # type: bool # imported from utility, and only modified here # line 1170 -debug_ = os.environ.get("DEBUG", "False").lower() == "true" or '--debug' in sys.argv # type: bool # line 1171 -level = logging.DEBUG if '--debug' in sys.argv else logging.INFO # type: 
int # line 1172 -_log = Logger(logging.getLogger(__name__)) # line 1173 -debug, info, warn, error = _log.debug, _log.info, _log.warn, _log.error # line 1173 -if __name__ == '__main__': # line 1174 - main() # line 1174 +force_sos = [None] if '--sos' in sys.argv else [] # type: bool # this is a trick allowing to modify the flags from the test suite # line 1185 +force_vcs = [None] if '--vcs' in sys.argv else [] # type: bool # line 1186 +verbose = [None] if '--verbose' in sys.argv or '-v' in sys.argv else [] # type: bool # imported from utility, and only modified here # line 1187 +debug_ = [None] if os.environ.get("DEBUG", "False").lower() == "true" or '--debug' in sys.argv else [] # type: bool # line 1188 +level = logging.DEBUG if '--debug' in sys.argv else logging.INFO # type: int # line 1189 + +_log = Logger(logging.getLogger(__name__)) # line 1191 +debug, info, warn, error = _log.debug, _log.info, _log.warn, _log.error # line 1191 + +if __name__ == '__main__': # line 1193 + main() # line 1193 diff --git a/sos/tests.coco b/sos/tests.coco index 766741f..5d7dc69 100644 --- a/sos/tests.coco +++ b/sos/tests.coco @@ -1,7 +1,7 @@ # Copyright Arne Bachmann # This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. -import codecs, enum, json, logging, os, shutil, sys, time, traceback, unittest, uuid +import codecs, collections, enum, json, logging, os, shutil, sys, time, traceback, unittest, uuid from io import BytesIO, BufferedRandom, TextIOWrapper try: @@ -99,7 +99,6 @@ class Tests(unittest.TestCase): ''' Entire test suite. 
''' def setUp(_): - sos.__dict__["verbose"] = True # required when executing tests manually sos.Metadata.singleton = None for entry in os.listdir(testFolder): # cannot remove testFolder on Windows when using TortoiseSVN as VCS resource = os.path.join(testFolder, entry) @@ -121,6 +120,7 @@ class Tests(unittest.TestCase): def assertNotInAny(_, what:str, where:str[]): _.assertFalse(any(what in w for w in where)) + def createFile(_, number:Union[int,str], contents:str = "x" * 10, prefix:str? = None): if prefix and not os.path.exists(prefix): os.makedirs(prefix) with open(("." if prefix is None else prefix) + os.sep + (("file%d" % number) if isinstance(number, int) else number), "wb") as fd: fd.write(contents if isinstance(contents, bytes) else contents.encode("cp1252")) @@ -258,8 +258,8 @@ class Tests(unittest.TestCase): sos.offline(options = ["--strict"]) # b0/r0 = ./file1 _.createFile(2) os.unlink("file1") - sos.commit() # b0/r1 = ./file2 - sos.branch(options = ["--fast", "--last"]) # branch b1 from b0/1 TODO modify once --fast becomes the new normal + sos.commit() # b0/r1 = +./file2 -./file1 + sos.branch(options = ["--fast", "--last"]) # branch b1 from b0/1 TODO modify option switch once --fast becomes the new normal _.assertAllIn([sos.metaFile, sos.metaBack, "b0", "b1"], os.listdir(sos.metaFolder), only = True) _.createFile(3) sos.commit() # b1/r2 = ./file2, ./file3 @@ -267,8 +267,8 @@ class Tests(unittest.TestCase): sos.branch(options = ["--fast", "--last"]) # branch b2 from b1/2 sos.destroy("0") # remove parent of b1 and transitive parent of b2 _.assertAllIn([sos.metaFile, sos.metaBack, "b0_last", "b1", "b2"], os.listdir(sos.metaFolder), only = True) # branch 0 was removed - _.assertAllIn([sos.metaFile, "r0", "r1", "r2"], os.listdir(sos.branchFolder(1)), only = True) # revisions were copied to branch 1 - _.assertAllIn([sos.metaFile, "r0", "r1", "r2"], os.listdir(sos.branchFolder(2)), only = True) # revisions were copied to branch 1 + _.assertAllIn([sos.metaFile, 
"r0", "r1", "r2"], os.listdir(sos.branchFolder(1)), only = True) # all revisions before branch point were copied to branch 1 + _.assertAllIn([sos.metaFile, "r0", "r1", "r2"], os.listdir(sos.branchFolder(2)), only = True) # TODO test also other functions like status --repo, log def testModificationWithOldRevisionRecognition(_): @@ -285,7 +285,7 @@ class Tests(unittest.TestCase): _.assertAllIn(["", ""], out) def testGetParentBranch(_): - m = sos.Accessor({"branches": {0: sos.Accessor({"parent": None, "revision": None}), 1: sos.Accessor({"parent": 0, "revision": 1})}}) + m = sos.Accessor({"branches": {0: sos.Accessor({"parent": None, "revision": None}), 1: sos.Accessor({"parent": 0, "revision": 1})}, "getParentBranches": lambda b, r: sos.Metadata.getParentBranches(m, b, r)}) # stupid workaround for the self-reference in the implementation _.assertEqual(0, sos.Metadata.getParentBranch(m, 1, 0)) _.assertEqual(0, sos.Metadata.getParentBranch(m, 1, 1)) _.assertEqual(1, sos.Metadata.getParentBranch(m, 1, 2)) @@ -523,8 +523,9 @@ class Tests(unittest.TestCase): sos.switch("test", ["--force"]) # should restore file1 and remove file3 _.assertTrue(_.existsFile(1)) # was restored from branch's revision r1 _.assertFalse(_.existsFile(3)) # was restored from branch's revision r1 + sos.verbose.append(None) # dict access necessary, as references on module-top-level are frozen out = wrapChannels(-> sos.dump("dumped.sos.zip", options = ["--skip-backup", "--full"])).replace("\r", "") - _.assertAllIn(["Dumping revisions"], out) + _.assertAllIn(["Dumping revisions"], out) # TODO cannot set verbose flag afer module loading. 
Use transparent wrapper instead _.assertNotIn("Creating backup", out) out = wrapChannels(-> sos.dump("dumped.sos.zip", options = ["--skip-backup"])).replace("\r", "") _.assertIn("Dumping revisions", out) @@ -532,6 +533,7 @@ class Tests(unittest.TestCase): out = wrapChannels(-> sos.dump("dumped.sos.zip", options = ["--full"])).replace("\r", "") _.assertAllIn(["Creating backup"], out) _.assertIn("Dumping revisions", out) + sos.verbose.pop() def testAutoDetectVCS(_): os.mkdir(".git") @@ -943,6 +945,33 @@ class Tests(unittest.TestCase): _.createFile(3, "y" * 10) # make a change sos.destroy("added", "--force") # should succeed + def testFastBranchingOnEmptyHistory(_): + ''' Test fast branching without revisions and with them. ''' + sos.offline(options = ["--strict", "--compress"]) # b0 + sos.branch("", "", options = ["--fast", "--last"]) # b1 + sos.branch("", "", options = ["--fast", "--last"]) # b2 + sos.branch("", "", options = ["--fast", "--last"]) # b3 + sos.destroy("2") + out = wrapChannels(-> sos.status()).replace("\r", "") + _.assertIn("b0 'trunk' @", out) + _.assertIn("b1 @", out) + _.assertIn("b3 @", out) + _.assertNotIn("b2 @", out) + sos.branch("", "") # non-fast branching of b4 + _.createFile(1) + _.createFile(2) + sos.commit("") + sos.branch("", "", options = ["--fast", "--last"]) # b5 + sos.destroy("4") + out = wrapChannels(-> sos.status()).replace("\r", "") + _.assertIn("b0 'trunk' @", out) + _.assertIn("b1 @", out) + _.assertIn("b3 @", out) + _.assertIn("b5 @", out) + _.assertNotIn("b2 @", out) + _.assertNotIn("b4 @", out) + # TODO add more files and branch again + def testUsage(_): try: sos.usage(); _.fail() # TODO expect sys.exit(0) except: pass @@ -1128,6 +1157,7 @@ class Tests(unittest.TestCase): # TODO tests for loadcommit redirection # TODO test wrong branch/revision after fast branching, would raise exception for -1 otherwise + if __name__ == '__main__': logging.basicConfig(level = logging.DEBUG, stream = sys.stderr, format = "%(asctime)-23s 
%(levelname)-8s %(name)s:%(lineno)d | %(message)s" if '--log' in sys.argv else "%(message)s") unittest.main(testRunner = debugTestRunner() if '-v' in sys.argv and not os.getenv("CI", "false").lower() == "true" else None) # warnings = "ignore") diff --git a/sos/tests.py b/sos/tests.py index da67308..dca0dd7 100644 --- a/sos/tests.py +++ b/sos/tests.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# __coconut_hash__ = 0xae15dafd +# __coconut_hash__ = 0x211c82d7 # Compiled with Coconut version 1.3.1-post_dev28 [Dead Parrot] @@ -22,6 +22,7 @@ # This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. import codecs # line 4 +import collections # line 4 import enum # line 4 import json # line 4 import logging # line 4 @@ -157,33 +158,33 @@ class Tests(unittest.TestCase): # line 98 ''' Entire test suite. ''' # line 99 def setUp(_): # line 101 - sos.__dict__["verbose"] = True # required when executing tests manually # line 102 - sos.Metadata.singleton = None # line 103 - for entry in os.listdir(testFolder): # cannot remove testFolder on Windows when using TortoiseSVN as VCS # line 104 - resource = os.path.join(testFolder, entry) # line 105 - shutil.rmtree(sos.encode(resource)) if os.path.isdir(sos.encode(resource)) else os.unlink(sos.encode(resource)) # line 106 - os.chdir(testFolder) # line 107 + sos.Metadata.singleton = None # line 102 + for entry in os.listdir(testFolder): # cannot remove testFolder on Windows when using TortoiseSVN as VCS # line 103 + resource = os.path.join(testFolder, entry) # line 104 + shutil.rmtree(sos.encode(resource)) if os.path.isdir(sos.encode(resource)) else os.unlink(sos.encode(resource)) # line 105 + os.chdir(testFolder) # line 106 - def assertAllIn(_, what: '_coconut.typing.Sequence[str]', where: 'Union[str, List[str]]', only: 'bool'=False): # line 110 - for w in what: # line 111 - 
_.assertIn(w, where) # line 111 - if only: # line 112 - _.assertEqual(len(what), len(where)) # line 112 + def assertAllIn(_, what: '_coconut.typing.Sequence[str]', where: 'Union[str, List[str]]', only: 'bool'=False): # line 109 + for w in what: # line 110 + _.assertIn(w, where) # line 110 + if only: # line 111 + _.assertEqual(len(what), len(where)) # line 111 - def assertAllNotIn(_, what: '_coconut.typing.Sequence[str]', where: 'Union[str, List[str]]'): # line 114 - for w in what: # line 115 - _.assertNotIn(w, where) # line 115 + def assertAllNotIn(_, what: '_coconut.typing.Sequence[str]', where: 'Union[str, List[str]]'): # line 113 + for w in what: # line 114 + _.assertNotIn(w, where) # line 114 - def assertInAll(_, what: 'str', where: '_coconut.typing.Sequence[str]'): # line 117 - for w in where: # line 118 - _.assertIn(what, w) # line 118 + def assertInAll(_, what: 'str', where: '_coconut.typing.Sequence[str]'): # line 116 + for w in where: # line 117 + _.assertIn(what, w) # line 117 - def assertInAny(_, what: 'str', where: '_coconut.typing.Sequence[str]'): # line 120 - _.assertTrue(any((what in w for w in where))) # line 120 + def assertInAny(_, what: 'str', where: '_coconut.typing.Sequence[str]'): # line 119 + _.assertTrue(any((what in w for w in where))) # line 119 + + def assertNotInAny(_, what: 'str', where: '_coconut.typing.Sequence[str]'): # line 121 + _.assertFalse(any((what in w for w in where))) # line 121 - def assertNotInAny(_, what: 'str', where: '_coconut.typing.Sequence[str]'): # line 122 - _.assertFalse(any((what in w for w in where))) # line 122 def createFile(_, number: 'Union[int, str]', contents: 'str'="x" * 10, prefix: '_coconut.typing.Optional[str]'=None): # line 124 if prefix and not os.path.exists(prefix): # line 125 @@ -343,8 +344,8 @@ def testFastBranching(_): # line 256 sos.offline(options=["--strict"]) # b0/r0 = ./file1 # line 258 _.createFile(2) # line 259 os.unlink("file1") # line 260 - sos.commit() # b0/r1 = ./file2 # line 261 - 
sos.branch(options=["--fast", "--last"]) # branch b1 from b0/1 TODO modify once --fast becomes the new normal # line 262 + sos.commit() # b0/r1 = +./file2 -./file1 # line 261 + sos.branch(options=["--fast", "--last"]) # branch b1 from b0/1 TODO modify option switch once --fast becomes the new normal # line 262 _.assertAllIn([sos.metaFile, sos.metaBack, "b0", "b1"], os.listdir(sos.metaFolder), only=True) # line 263 _.createFile(3) # line 264 sos.commit() # b1/r2 = ./file2, ./file3 # line 265 @@ -352,8 +353,8 @@ def testFastBranching(_): # line 256 sos.branch(options=["--fast", "--last"]) # branch b2 from b1/2 # line 267 sos.destroy("0") # remove parent of b1 and transitive parent of b2 # line 268 _.assertAllIn([sos.metaFile, sos.metaBack, "b0_last", "b1", "b2"], os.listdir(sos.metaFolder), only=True) # branch 0 was removed # line 269 - _.assertAllIn([sos.metaFile, "r0", "r1", "r2"], os.listdir(sos.branchFolder(1)), only=True) # revisions were copied to branch 1 # line 270 - _.assertAllIn([sos.metaFile, "r0", "r1", "r2"], os.listdir(sos.branchFolder(2)), only=True) # revisions were copied to branch 1 # line 271 + _.assertAllIn([sos.metaFile, "r0", "r1", "r2"], os.listdir(sos.branchFolder(1)), only=True) # all revisions before branch point were copied to branch 1 # line 270 + _.assertAllIn([sos.metaFile, "r0", "r1", "r2"], os.listdir(sos.branchFolder(2)), only=True) # line 271 # TODO test also other functions like status --repo, log def testModificationWithOldRevisionRecognition(_): # line 274 @@ -370,7 +371,7 @@ def testModificationWithOldRevisionRecognition(_): # line 274 _.assertAllIn(["", ""], out) # line 285 def testGetParentBranch(_): # line 287 - m = sos.Accessor({"branches": {0: sos.Accessor({"parent": None, "revision": None}), 1: sos.Accessor({"parent": 0, "revision": 1})}}) # line 288 + m = sos.Accessor({"branches": {0: sos.Accessor({"parent": None, "revision": None}), 1: sos.Accessor({"parent": 0, "revision": 1})}, "getParentBranches": lambda b, r: 
sos.Metadata.getParentBranches(m, b, r)}) # stupid workaround for the self-reference in the implementation # line 288 _.assertEqual(0, sos.Metadata.getParentBranch(m, 1, 0)) # line 289 _.assertEqual(0, sos.Metadata.getParentBranch(m, 1, 1)) # line 290 _.assertEqual(1, sos.Metadata.getParentBranch(m, 1, 2)) # line 291 @@ -628,706 +629,735 @@ def testSwitch(_): # line 492 sos.switch("test", ["--force"]) # should restore file1 and remove file3 # line 523 _.assertTrue(_.existsFile(1)) # was restored from branch's revision r1 # line 524 _.assertFalse(_.existsFile(3)) # was restored from branch's revision r1 # line 525 - out = wrapChannels(lambda _=None: sos.dump("dumped.sos.zip", options=["--skip-backup", "--full"])).replace("\r", "") # line 526 - _.assertAllIn(["Dumping revisions"], out) # line 527 - _.assertNotIn("Creating backup", out) # line 528 - out = wrapChannels(lambda _=None: sos.dump("dumped.sos.zip", options=["--skip-backup"])).replace("\r", "") # line 529 - _.assertIn("Dumping revisions", out) # line 530 - _.assertNotIn("Creating backup", out) # line 531 - out = wrapChannels(lambda _=None: sos.dump("dumped.sos.zip", options=["--full"])).replace("\r", "") # line 532 - _.assertAllIn(["Creating backup"], out) # line 533 - _.assertIn("Dumping revisions", out) # line 534 - - def testAutoDetectVCS(_): # line 536 - os.mkdir(".git") # line 537 - sos.offline(sos.vcsBranches[sos.findSosVcsBase()[2]]) # create initial branch # line 538 - with open(sos.metaFolder + os.sep + sos.metaFile, "r") as fd: # line 539 - meta = fd.read() # line 539 - _.assertTrue("\"master\"" in meta) # line 540 - os.rmdir(".git") # line 541 - - def testUpdate(_): # line 543 - sos.offline("trunk") # create initial branch b0/r0 # line 544 - _.createFile(1, "x" * 100) # line 545 - sos.commit("second") # create b0/r1 # line 546 - - sos.switch("/0") # go back to b0/r0 - deletes file1 # line 548 - _.assertFalse(_.existsFile(1)) # line 549 - - sos.update("/1") # recreate file1 # line 551 - 
_.assertTrue(_.existsFile(1)) # line 552 - - sos.commit("third", ["--force"]) # force because nothing to commit. should create r2 with same contents as r1, but as differential from r1, not from r0 (= no changes in meta folder) # line 554 - _.assertTrue(os.path.exists(sos.revisionFolder(0, 2))) # line 555 - _.assertTrue(os.path.exists(sos.revisionFolder(0, 2, file=sos.metaFile))) # line 556 - _.assertEqual(1, len(os.listdir(sos.revisionFolder(0, 2)))) # only meta data file, no differential files # line 557 - - sos.update("/1") # do nothing, as nothing has changed # line 559 - _.assertTrue(_.existsFile(1)) # line 560 - - _.createFile(2, "y" * 100) # line 562 + sos.verbose.append(None) # dict access necessary, as references on module-top-level are frozen # line 526 + out = wrapChannels(lambda _=None: sos.dump("dumped.sos.zip", options=["--skip-backup", "--full"])).replace("\r", "") # line 527 + _.assertAllIn(["Dumping revisions"], out) # TODO cannot set verbose flag afer module loading. Use transparent wrapper instead # line 528 + _.assertNotIn("Creating backup", out) # line 529 + out = wrapChannels(lambda _=None: sos.dump("dumped.sos.zip", options=["--skip-backup"])).replace("\r", "") # line 530 + _.assertIn("Dumping revisions", out) # line 531 + _.assertNotIn("Creating backup", out) # line 532 + out = wrapChannels(lambda _=None: sos.dump("dumped.sos.zip", options=["--full"])).replace("\r", "") # line 533 + _.assertAllIn(["Creating backup"], out) # line 534 + _.assertIn("Dumping revisions", out) # line 535 + sos.verbose.pop() # line 536 + + def testAutoDetectVCS(_): # line 538 + os.mkdir(".git") # line 539 + sos.offline(sos.vcsBranches[sos.findSosVcsBase()[2]]) # create initial branch # line 540 + with open(sos.metaFolder + os.sep + sos.metaFile, "r") as fd: # line 541 + meta = fd.read() # line 541 + _.assertTrue("\"master\"" in meta) # line 542 + os.rmdir(".git") # line 543 + + def testUpdate(_): # line 545 + sos.offline("trunk") # create initial branch b0/r0 # line 
546 + _.createFile(1, "x" * 100) # line 547 + sos.commit("second") # create b0/r1 # line 548 + + sos.switch("/0") # go back to b0/r0 - deletes file1 # line 550 + _.assertFalse(_.existsFile(1)) # line 551 + + sos.update("/1") # recreate file1 # line 553 + _.assertTrue(_.existsFile(1)) # line 554 + + sos.commit("third", ["--force"]) # force because nothing to commit. should create r2 with same contents as r1, but as differential from r1, not from r0 (= no changes in meta folder) # line 556 + _.assertTrue(os.path.exists(sos.revisionFolder(0, 2))) # line 557 + _.assertTrue(os.path.exists(sos.revisionFolder(0, 2, file=sos.metaFile))) # line 558 + _.assertEqual(1, len(os.listdir(sos.revisionFolder(0, 2)))) # only meta data file, no differential files # line 559 + + sos.update("/1") # do nothing, as nothing has changed # line 561 + _.assertTrue(_.existsFile(1)) # line 562 + + _.createFile(2, "y" * 100) # line 564 # out = wrapChannels(-> sos.branch("other")) # won't comply as there are changes # _.assertIn("--force", out) - sos.branch("other", options=["--force"]) # automatically including file 2 (as we are in simple mode) # line 565 - _.assertTrue(_.existsFile(2)) # line 566 - sos.update("trunk", ["--add"]) # only add stuff # line 567 + sos.branch("other", options=["--force"]) # automatically including file 2 (as we are in simple mode) # line 567 _.assertTrue(_.existsFile(2)) # line 568 - sos.update("trunk") # nothing to do # line 569 - _.assertFalse(_.existsFile(2)) # removes file not present in original branch # line 570 - - theirs = b"a\nb\nc\nd\ne\nf\ng\nh\nx\nx\nj\nk" # line 572 - _.createFile(10, theirs) # line 573 - mine = b"a\nc\nd\ne\ng\nf\nx\nh\ny\ny\nj" # missing "b", inserted g, modified g->x, replace x/x -> y/y, removed k # line 574 - _.createFile(11, mine) # line 575 - _.assertEqual((b"a\nb\nc\nd\ne\nf\ng\nh\nx\nx\nj\nk", b"\n"), sos.merge(filename="." + os.sep + "file10", intoname="." 
+ os.sep + "file11", mergeOperation=sos.MergeOperation.BOTH)) # completely recreated other file # line 576 - _.assertEqual((b'a\nb\nc\nd\ne\ng\nf\ng\nh\ny\ny\nx\nx\nj\nk', b"\n"), sos.merge(filename="." + os.sep + "file10", intoname="." + os.sep + "file11", mergeOperation=sos.MergeOperation.INSERT)) # line 577 - - def testUpdate2(_): # line 579 - _.createFile("test.txt", "x" * 10) # line 580 - sos.offline("trunk", ["--strict"]) # use strict mode, as timestamp differences are too small for testing # line 581 - sync() # line 582 - sos.branch("mod") # line 583 - _.createFile("test.txt", "x" * 5 + "y" * 5) # line 584 - sos.commit("mod") # create b0/r1 # line 585 - sos.switch("trunk", ["--force"]) # should replace contents, force in case some other files were modified (e.g. during working on the code) TODO investigate more # line 586 - _.assertTrue(_.existsFile("test.txt", b"x" * 10)) # line 587 - sos.update("mod") # integrate changes TODO same with ask -> theirs # line 588 - _.existsFile("test.txt", b"x" * 5 + b"y" * 5) # line 589 - _.createFile("test.txt", "x" * 10) # line 590 - mockInput(["t"], lambda _=None: sos.update("mod", ["--ask-lines"])) # line 591 - sync() # line 592 - _.assertTrue(_.existsFile("test.txt", b"x" * 5 + b"y" * 5)) # line 593 - _.createFile("test.txt", "x" * 5 + "z" + "y" * 4) # line 594 - sos.update("mod") # auto-insert/removes (no intra-line conflict) # line 595 + sos.update("trunk", ["--add"]) # only add stuff # line 569 + _.assertTrue(_.existsFile(2)) # line 570 + sos.update("trunk") # nothing to do # line 571 + _.assertFalse(_.existsFile(2)) # removes file not present in original branch # line 572 + + theirs = b"a\nb\nc\nd\ne\nf\ng\nh\nx\nx\nj\nk" # line 574 + _.createFile(10, theirs) # line 575 + mine = b"a\nc\nd\ne\ng\nf\nx\nh\ny\ny\nj" # missing "b", inserted g, modified g->x, replace x/x -> y/y, removed k # line 576 + _.createFile(11, mine) # line 577 + _.assertEqual((b"a\nb\nc\nd\ne\nf\ng\nh\nx\nx\nj\nk", b"\n"), sos.merge(filename="." 
+ os.sep + "file10", intoname="." + os.sep + "file11", mergeOperation=sos.MergeOperation.BOTH)) # completely recreated other file # line 578 + _.assertEqual((b'a\nb\nc\nd\ne\ng\nf\ng\nh\ny\ny\nx\nx\nj\nk', b"\n"), sos.merge(filename="." + os.sep + "file10", intoname="." + os.sep + "file11", mergeOperation=sos.MergeOperation.INSERT)) # line 579 + + def testUpdate2(_): # line 581 + _.createFile("test.txt", "x" * 10) # line 582 + sos.offline("trunk", ["--strict"]) # use strict mode, as timestamp differences are too small for testing # line 583 + sync() # line 584 + sos.branch("mod") # line 585 + _.createFile("test.txt", "x" * 5 + "y" * 5) # line 586 + sos.commit("mod") # create b0/r1 # line 587 + sos.switch("trunk", ["--force"]) # should replace contents, force in case some other files were modified (e.g. during working on the code) TODO investigate more # line 588 + _.assertTrue(_.existsFile("test.txt", b"x" * 10)) # line 589 + sos.update("mod") # integrate changes TODO same with ask -> theirs # line 590 + _.existsFile("test.txt", b"x" * 5 + b"y" * 5) # line 591 + _.createFile("test.txt", "x" * 10) # line 592 + mockInput(["t"], lambda _=None: sos.update("mod", ["--ask-lines"])) # line 593 + sync() # line 594 + _.assertTrue(_.existsFile("test.txt", b"x" * 5 + b"y" * 5)) # line 595 _.createFile("test.txt", "x" * 5 + "z" + "y" * 4) # line 596 - sync() # line 597 - mockInput(["t"], lambda _=None: sos.update("mod", ["--ask"])) # same as above with interaction -> use theirs (overwrite current file state) # line 598 - _.assertTrue(_.existsFile("test.txt", b"x" * 5 + b"y" * 5)) # line 599 - - def testIsTextType(_): # line 601 - m = sos.Metadata(".") # line 602 - m.c.texttype = ["*.x", "*.md", "*.md.*"] # line 603 - m.c.bintype = ["*.md.confluence"] # line 604 - _.assertTrue(m.isTextType("ab.txt")) # line 605 - _.assertTrue(m.isTextType("./ab.txt")) # line 606 - _.assertTrue(m.isTextType("bc/ab.txt")) # line 607 - _.assertFalse(m.isTextType("bc/ab.")) # line 608 - 
_.assertTrue(m.isTextType("23_3.x.x")) # line 609 - _.assertTrue(m.isTextType("dfg/dfglkjdf7/test.md")) # line 610 - _.assertTrue(m.isTextType("./test.md.pdf")) # line 611 - _.assertFalse(m.isTextType("./test_a.md.confluence")) # line 612 - - def testEolDet(_): # line 614 - ''' Check correct end-of-line detection. ''' # line 615 - _.assertEqual(b"\n", sos.eoldet(b"a\nb")) # line 616 - _.assertEqual(b"\r\n", sos.eoldet(b"a\r\nb\r\n")) # line 617 - _.assertEqual(b"\r", sos.eoldet(b"\ra\rb")) # line 618 - _.assertAllIn(["Inconsistent", "with "], wrapChannels(lambda: _.assertEqual(b"\n", sos.eoldet(b"\r\na\r\nb\n")))) # line 619 - _.assertAllIn(["Inconsistent", "without"], wrapChannels(lambda: _.assertEqual(b"\n", sos.eoldet(b"\ra\nnb\n")))) # line 620 - _.assertIsNone(sos.eoldet(b"")) # line 621 - _.assertIsNone(sos.eoldet(b"sdf")) # line 622 - - def testMerge(_): # line 624 - ''' Check merge results depending on user options. ''' # line 625 - a = b"a\nb\ncc\nd" # type: bytes # line 626 - b = b"a\nb\nee\nd" # type: bytes # replaces cc by ee # line 627 - _.assertEqual(b"a\nb\ncc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT)[0]) # one-line block replacement using lineMerge # line 628 - _.assertEqual(b"a\nb\neecc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT, charMergeOperation=sos.MergeOperation.INSERT)[0]) # means insert changes from a into b, but don't replace # line 629 - _.assertEqual(b"a\nb\n\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT, charMergeOperation=sos.MergeOperation.REMOVE)[0]) # means insert changes from a into b, but don't replace # line 630 - _.assertEqual(b"a\nb\ncc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.REMOVE)[0]) # one-line block replacement using lineMerge # line 631 - _.assertEqual(b"a\nb\n\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.REMOVE, charMergeOperation=sos.MergeOperation.REMOVE)[0]) # line 632 - _.assertEqual(a, sos.merge(a, b, 
mergeOperation=sos.MergeOperation.BOTH)[0]) # keeps any changes in b # line 633 - a = b"a\nb\ncc\nd" # line 634 - b = b"a\nb\nee\nf\nd" # replaces cc by block of two lines ee, f # line 635 - _.assertEqual(b"a\nb\nee\nf\ncc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT)[0]) # multi-line block replacement # line 636 - _.assertEqual(b"a\nb\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.REMOVE)[0]) # line 637 - _.assertEqual(a, sos.merge(a, b, mergeOperation=sos.MergeOperation.BOTH)[0]) # keeps any changes in b # line 638 + sos.update("mod") # auto-insert/removes (no intra-line conflict) # line 597 + _.createFile("test.txt", "x" * 5 + "z" + "y" * 4) # line 598 + sync() # line 599 + mockInput(["t"], lambda _=None: sos.update("mod", ["--ask"])) # same as above with interaction -> use theirs (overwrite current file state) # line 600 + _.assertTrue(_.existsFile("test.txt", b"x" * 5 + b"y" * 5)) # line 601 + + def testIsTextType(_): # line 603 + m = sos.Metadata(".") # line 604 + m.c.texttype = ["*.x", "*.md", "*.md.*"] # line 605 + m.c.bintype = ["*.md.confluence"] # line 606 + _.assertTrue(m.isTextType("ab.txt")) # line 607 + _.assertTrue(m.isTextType("./ab.txt")) # line 608 + _.assertTrue(m.isTextType("bc/ab.txt")) # line 609 + _.assertFalse(m.isTextType("bc/ab.")) # line 610 + _.assertTrue(m.isTextType("23_3.x.x")) # line 611 + _.assertTrue(m.isTextType("dfg/dfglkjdf7/test.md")) # line 612 + _.assertTrue(m.isTextType("./test.md.pdf")) # line 613 + _.assertFalse(m.isTextType("./test_a.md.confluence")) # line 614 + + def testEolDet(_): # line 616 + ''' Check correct end-of-line detection. 
''' # line 617 + _.assertEqual(b"\n", sos.eoldet(b"a\nb")) # line 618 + _.assertEqual(b"\r\n", sos.eoldet(b"a\r\nb\r\n")) # line 619 + _.assertEqual(b"\r", sos.eoldet(b"\ra\rb")) # line 620 + _.assertAllIn(["Inconsistent", "with "], wrapChannels(lambda: _.assertEqual(b"\n", sos.eoldet(b"\r\na\r\nb\n")))) # line 621 + _.assertAllIn(["Inconsistent", "without"], wrapChannels(lambda: _.assertEqual(b"\n", sos.eoldet(b"\ra\nnb\n")))) # line 622 + _.assertIsNone(sos.eoldet(b"")) # line 623 + _.assertIsNone(sos.eoldet(b"sdf")) # line 624 + + def testMerge(_): # line 626 + ''' Check merge results depending on user options. ''' # line 627 + a = b"a\nb\ncc\nd" # type: bytes # line 628 + b = b"a\nb\nee\nd" # type: bytes # replaces cc by ee # line 629 + _.assertEqual(b"a\nb\ncc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT)[0]) # one-line block replacement using lineMerge # line 630 + _.assertEqual(b"a\nb\neecc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT, charMergeOperation=sos.MergeOperation.INSERT)[0]) # means insert changes from a into b, but don't replace # line 631 + _.assertEqual(b"a\nb\n\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT, charMergeOperation=sos.MergeOperation.REMOVE)[0]) # means insert changes from a into b, but don't replace # line 632 + _.assertEqual(b"a\nb\ncc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.REMOVE)[0]) # one-line block replacement using lineMerge # line 633 + _.assertEqual(b"a\nb\n\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.REMOVE, charMergeOperation=sos.MergeOperation.REMOVE)[0]) # line 634 + _.assertEqual(a, sos.merge(a, b, mergeOperation=sos.MergeOperation.BOTH)[0]) # keeps any changes in b # line 635 + a = b"a\nb\ncc\nd" # line 636 + b = b"a\nb\nee\nf\nd" # replaces cc by block of two lines ee, f # line 637 + _.assertEqual(b"a\nb\nee\nf\ncc\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.INSERT)[0]) # multi-line block replacement # line 638 + 
_.assertEqual(b"a\nb\nd", sos.merge(a, b, mergeOperation=sos.MergeOperation.REMOVE)[0]) # line 639 + _.assertEqual(a, sos.merge(a, b, mergeOperation=sos.MergeOperation.BOTH)[0]) # keeps any changes in b # line 640 # Test with change + insert - _.assertEqual(b"a\nb fdcd d\ne", sos.merge(b"a\nb cd d\ne", b"a\nb fdd d\ne", charMergeOperation=sos.MergeOperation.INSERT)[0]) # line 640 - _.assertEqual(b"a\nb d d\ne", sos.merge(b"a\nb cd d\ne", b"a\nb fdd d\ne", charMergeOperation=sos.MergeOperation.REMOVE)[0]) # line 641 + _.assertEqual(b"a\nb fdcd d\ne", sos.merge(b"a\nb cd d\ne", b"a\nb fdd d\ne", charMergeOperation=sos.MergeOperation.INSERT)[0]) # line 642 + _.assertEqual(b"a\nb d d\ne", sos.merge(b"a\nb cd d\ne", b"a\nb fdd d\ne", charMergeOperation=sos.MergeOperation.REMOVE)[0]) # line 643 # Test interactive merge - a = b"a\nb\nb\ne" # block-wise replacement # line 643 - b = b"a\nc\ne" # line 644 - _.assertEqual(b, mockInput(["i"], lambda _=None: sos.merge(a, b, mergeOperation=sos.MergeOperation.ASK)[0])) # line 645 - _.assertEqual(a, mockInput(["t"], lambda _=None: sos.merge(a, b, mergeOperation=sos.MergeOperation.ASK)[0])) # line 646 - a = b"a\nb\ne" # intra-line merge # line 647 - _.assertEqual(b, mockInput(["i"], lambda _=None: sos.merge(a, b, charMergeOperation=sos.MergeOperation.ASK)[0])) # line 648 - _.assertEqual(a, mockInput(["t"], lambda _=None: sos.merge(a, b, charMergeOperation=sos.MergeOperation.ASK)[0])) # line 649 - - def testMergeEol(_): # line 651 - _.assertEqual(b"\r\n", sos.merge(b"a\nb", b"a\r\nb")[1]) # line 652 - _.assertIn("Differing EOL-styles", wrapChannels(lambda _=None: sos.merge(b"a\nb", b"a\r\nb"))) # expects a warning # line 653 - _.assertIn(b"a\r\nb", sos.merge(b"a\nb", b"a\r\nb")[0]) # when in doubt, use "mine" CR-LF # line 654 - _.assertIn(b"a\nb", sos.merge(b"a\nb", b"a\r\nb", eol=True)[0]) # line 655 - _.assertEqual(b"\n", sos.merge(b"a\nb", b"a\r\nb", eol=True)[1]) # line 656 - - def testPickyMode(_): # line 658 - ''' Confirm that 
picky mode reset tracked patterns after commits. ''' # line 659 - sos.offline("trunk", None, ["--picky"]) # line 660 - changes = sos.changes() # line 661 - _.assertEqual(0, len(changes.additions)) # do not list any existing file as an addition # line 662 - sos.add(".", "./file?", ["--force"]) # line 663 - _.createFile(1, "aa") # line 664 - sos.commit("First") # add one file # line 665 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # line 666 - _.createFile(2, "b") # line 667 - try: # add nothing, because picky # line 668 - sos.commit("Second") # add nothing, because picky # line 668 - except: # line 669 - pass # line 669 - sos.add(".", "./file?") # line 670 - sos.commit("Third") # line 671 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # line 672 - out = wrapChannels(lambda _=None: sos.log()).replace("\r", "") # line 673 - _.assertIn(" r0", out) # line 674 - sys.argv.extend(["-n", "2"]) # line 675 - out = wrapChannels(lambda _=None: sos.log()).replace("\r", "") # line 676 - sys.argv.pop() # line 677 - sys.argv.pop() # line 677 - _.assertNotIn(" r0", out) # because number of log lines was limited by argument # line 678 - _.assertIn(" r1", out) # line 679 - _.assertIn(" * r2", out) # line 680 - try: # line 681 - sos.config(["set", "logLines", "1"], options=["--local"]) # line 681 - except SystemExit as E: # line 682 - _.assertEqual(0, E.code) # line 682 - out = wrapChannels(lambda _=None: sos.log([])).replace("\r", "") # line 683 - _.assertNotIn(" r0", out) # because number of log lines was limited # line 684 - _.assertNotIn(" r1", out) # line 685 - _.assertIn(" * r2", out) # line 686 - _.createFile(3, prefix="sub") # line 687 - sos.add("sub", "sub/file?") # line 688 - changes = sos.changes() # line 689 - _.assertEqual(1, len(changes.additions)) # line 690 - _.assertTrue("sub/file3" in changes.additions) # line 691 - - def testTrackedSubfolder(_): # line 693 - ''' See if patterns for files in sub folders are picked up correctly. 
''' # line 694 - os.mkdir("." + os.sep + "sub") # line 695 - sos.offline("trunk", None, ["--track"]) # line 696 - _.createFile(1, "x") # line 697 - _.createFile(1, "x", prefix="sub") # line 698 - sos.add(".", "./file?") # add glob pattern to track # line 699 - sos.commit("First") # line 700 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # one new file + meta file # line 701 - sos.add(".", "sub/file?") # add glob pattern to track # line 702 - sos.commit("Second") # one new file + meta # line 703 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # one new file + meta file # line 704 - os.unlink("file1") # remove from basefolder # line 705 - _.createFile(2, "y") # line 706 - sos.remove(".", "sub/file?") # line 707 - try: # raises Exit. TODO test the "suggest a pattern" case # line 708 - sos.remove(".", "sub/bla") # raises Exit. TODO test the "suggest a pattern" case # line 708 - _.fail() # raises Exit. TODO test the "suggest a pattern" case # line 708 - except: # line 709 - pass # line 709 - sos.commit("Third") # line 710 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # one new file + meta # line 711 + a = b"a\nb\nb\ne" # block-wise replacement # line 645 + b = b"a\nc\ne" # line 646 + _.assertEqual(b, mockInput(["i"], lambda _=None: sos.merge(a, b, mergeOperation=sos.MergeOperation.ASK)[0])) # line 647 + _.assertEqual(a, mockInput(["t"], lambda _=None: sos.merge(a, b, mergeOperation=sos.MergeOperation.ASK)[0])) # line 648 + a = b"a\nb\ne" # intra-line merge # line 649 + _.assertEqual(b, mockInput(["i"], lambda _=None: sos.merge(a, b, charMergeOperation=sos.MergeOperation.ASK)[0])) # line 650 + _.assertEqual(a, mockInput(["t"], lambda _=None: sos.merge(a, b, charMergeOperation=sos.MergeOperation.ASK)[0])) # line 651 + + def testMergeEol(_): # line 653 + _.assertEqual(b"\r\n", sos.merge(b"a\nb", b"a\r\nb")[1]) # line 654 + _.assertIn("Differing EOL-styles", wrapChannels(lambda _=None: sos.merge(b"a\nb", b"a\r\nb"))) # expects a 
warning # line 655 + _.assertIn(b"a\r\nb", sos.merge(b"a\nb", b"a\r\nb")[0]) # when in doubt, use "mine" CR-LF # line 656 + _.assertIn(b"a\nb", sos.merge(b"a\nb", b"a\r\nb", eol=True)[0]) # line 657 + _.assertEqual(b"\n", sos.merge(b"a\nb", b"a\r\nb", eol=True)[1]) # line 658 + + def testPickyMode(_): # line 660 + ''' Confirm that picky mode reset tracked patterns after commits. ''' # line 661 + sos.offline("trunk", None, ["--picky"]) # line 662 + changes = sos.changes() # line 663 + _.assertEqual(0, len(changes.additions)) # do not list any existing file as an addition # line 664 + sos.add(".", "./file?", ["--force"]) # line 665 + _.createFile(1, "aa") # line 666 + sos.commit("First") # add one file # line 667 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # line 668 + _.createFile(2, "b") # line 669 + try: # add nothing, because picky # line 670 + sos.commit("Second") # add nothing, because picky # line 670 + except: # line 671 + pass # line 671 + sos.add(".", "./file?") # line 672 + sos.commit("Third") # line 673 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # line 674 + out = wrapChannels(lambda _=None: sos.log()).replace("\r", "") # line 675 + _.assertIn(" r0", out) # line 676 + sys.argv.extend(["-n", "2"]) # line 677 + out = wrapChannels(lambda _=None: sos.log()).replace("\r", "") # line 678 + sys.argv.pop() # line 679 + sys.argv.pop() # line 679 + _.assertNotIn(" r0", out) # because number of log lines was limited by argument # line 680 + _.assertIn(" r1", out) # line 681 + _.assertIn(" * r2", out) # line 682 + try: # line 683 + sos.config(["set", "logLines", "1"], options=["--local"]) # line 683 + except SystemExit as E: # line 684 + _.assertEqual(0, E.code) # line 684 + out = wrapChannels(lambda _=None: sos.log([])).replace("\r", "") # line 685 + _.assertNotIn(" r0", out) # because number of log lines was limited # line 686 + _.assertNotIn(" r1", out) # line 687 + _.assertIn(" * r2", out) # line 688 + _.createFile(3, 
prefix="sub") # line 689 + sos.add("sub", "sub/file?") # line 690 + changes = sos.changes() # line 691 + _.assertEqual(1, len(changes.additions)) # line 692 + _.assertTrue("sub/file3" in changes.additions) # line 693 + + def testTrackedSubfolder(_): # line 695 + ''' See if patterns for files in sub folders are picked up correctly. ''' # line 696 + os.mkdir("." + os.sep + "sub") # line 697 + sos.offline("trunk", None, ["--track"]) # line 698 + _.createFile(1, "x") # line 699 + _.createFile(1, "x", prefix="sub") # line 700 + sos.add(".", "./file?") # add glob pattern to track # line 701 + sos.commit("First") # line 702 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # one new file + meta file # line 703 + sos.add(".", "sub/file?") # add glob pattern to track # line 704 + sos.commit("Second") # one new file + meta # line 705 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # one new file + meta file # line 706 + os.unlink("file1") # remove from basefolder # line 707 + _.createFile(2, "y") # line 708 + sos.remove(".", "sub/file?") # line 709 + try: # raises Exit. TODO test the "suggest a pattern" case # line 710 + sos.remove(".", "sub/bla") # raises Exit. TODO test the "suggest a pattern" case # line 710 + _.fail() # raises Exit. TODO test the "suggest a pattern" case # line 710 + except: # line 711 + pass # line 711 + sos.commit("Third") # line 712 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # one new file + meta # line 713 # TODO also check if /file1 and sub/file1 were removed from index - def testTrackedMode(_): # line 714 + def testTrackedMode(_): # line 716 ''' Difference in semantics vs simple mode: - For remote/other branch we can only know and consider tracked files, thus ignoring all complexity stemming from handling addition of untracked files. - For current branch, we can take into account tracked and untracked ones, in theory, but it doesn't make sense. 
In conclusion, using the union of tracking patterns from both sides to find affected files makes sense, but disallow deleting files not present in remote branch. - ''' # line 719 - sos.offline("test", options=["--track"]) # set up repo in tracking mode (SVN- or gitless-style) # line 720 - _.createFile(1) # line 721 - _.createFile("a123a") # untracked file "a123a" # line 722 - sos.add(".", "./file?") # add glob tracking pattern # line 723 - sos.commit("second") # versions "file1" # line 724 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # one new file + meta file # line 725 - out = wrapChannels(lambda _=None: sos.status()).replace("\r", "") # line 726 - _.assertIn(" | ./file?", out) # line 727 - - _.createFile(2) # untracked file "file2" # line 729 - sos.commit("third") # versions "file2" # line 730 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # one new file + meta file # line 731 - - os.mkdir("." + os.sep + "sub") # line 733 - _.createFile(3, prefix="sub") # untracked file "sub/file3" # line 734 - sos.commit("fourth", ["--force"]) # no tracking pattern matches the subfolder # line 735 - _.assertEqual(1, len(os.listdir(sos.revisionFolder(0, 3)))) # meta file only, no other tracked path/file # line 736 - - sos.branch("Other") # second branch containing file1 and file2 tracked by "./file?" 
# line 738 - sos.remove(".", "./file?") # remove tracking pattern, but don't touch previously created and versioned files # line 739 - sos.add(".", "./a*a") # add tracking pattern # line 740 - changes = sos.changes() # should pick up addition only, because tracked, but not the deletion, as not tracked anymore # line 741 - _.assertEqual(0, len(changes.modifications)) # line 742 - _.assertEqual(0, len(changes.deletions)) # not tracked anymore, but contained in version history and not removed # line 743 - _.assertEqual(1, len(changes.additions)) # detected one addition "a123a", but won't recognize untracking files as deletion # line 744 - - sos.commit("Second_2") # line 746 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(1, 1)))) # "a123a" + meta file # line 747 - _.existsFile(1, b"x" * 10) # line 748 - _.existsFile(2, b"x" * 10) # line 749 - - sos.switch("test") # go back to first branch - tracks only "file?", but not "a*a" # line 751 - _.existsFile(1, b"x" * 10) # line 752 - _.existsFile("a123a", b"x" * 10) # line 753 - - sos.update("Other") # integrate tracked files and tracking pattern from second branch into working state of master branch # line 755 - _.assertTrue(os.path.exists("." + os.sep + "file1")) # line 756 - _.assertTrue(os.path.exists("." + os.sep + "a123a")) # line 757 - - _.createFile("axxxa") # new file that should be tracked on "test" now that we integrated "Other" # line 759 - sos.commit("fifth") # create new revision after integrating updates from second branch # line 760 - _.assertEqual(3, len(os.listdir(sos.revisionFolder(0, 4)))) # one new file from other branch + one new in current folder + meta file # line 761 - sos.switch("Other") # switch back to just integrated branch that tracks only "a*a" - shouldn't do anything # line 762 - _.assertTrue(os.path.exists("." + os.sep + "file1")) # line 763 - _.assertTrue(os.path.exists("." + os.sep + "a123a")) # line 764 - _.assertFalse(os.path.exists("." 
+ os.sep + "axxxa")) # because tracked in both branches, but not present in other -> delete in file tree # line 765 + ''' # line 721 + sos.offline("test", options=["--track"]) # set up repo in tracking mode (SVN- or gitless-style) # line 722 + _.createFile(1) # line 723 + _.createFile("a123a") # untracked file "a123a" # line 724 + sos.add(".", "./file?") # add glob tracking pattern # line 725 + sos.commit("second") # versions "file1" # line 726 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # one new file + meta file # line 727 + out = wrapChannels(lambda _=None: sos.status()).replace("\r", "") # line 728 + _.assertIn(" | ./file?", out) # line 729 + + _.createFile(2) # untracked file "file2" # line 731 + sos.commit("third") # versions "file2" # line 732 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # one new file + meta file # line 733 + + os.mkdir("." + os.sep + "sub") # line 735 + _.createFile(3, prefix="sub") # untracked file "sub/file3" # line 736 + sos.commit("fourth", ["--force"]) # no tracking pattern matches the subfolder # line 737 + _.assertEqual(1, len(os.listdir(sos.revisionFolder(0, 3)))) # meta file only, no other tracked path/file # line 738 + + sos.branch("Other") # second branch containing file1 and file2 tracked by "./file?" 
# line 740 + sos.remove(".", "./file?") # remove tracking pattern, but don't touch previously created and versioned files # line 741 + sos.add(".", "./a*a") # add tracking pattern # line 742 + changes = sos.changes() # should pick up addition only, because tracked, but not the deletion, as not tracked anymore # line 743 + _.assertEqual(0, len(changes.modifications)) # line 744 + _.assertEqual(0, len(changes.deletions)) # not tracked anymore, but contained in version history and not removed # line 745 + _.assertEqual(1, len(changes.additions)) # detected one addition "a123a", but won't recognize untracking files as deletion # line 746 + + sos.commit("Second_2") # line 748 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(1, 1)))) # "a123a" + meta file # line 749 + _.existsFile(1, b"x" * 10) # line 750 + _.existsFile(2, b"x" * 10) # line 751 + + sos.switch("test") # go back to first branch - tracks only "file?", but not "a*a" # line 753 + _.existsFile(1, b"x" * 10) # line 754 + _.existsFile("a123a", b"x" * 10) # line 755 + + sos.update("Other") # integrate tracked files and tracking pattern from second branch into working state of master branch # line 757 + _.assertTrue(os.path.exists("." + os.sep + "file1")) # line 758 + _.assertTrue(os.path.exists("." + os.sep + "a123a")) # line 759 + + _.createFile("axxxa") # new file that should be tracked on "test" now that we integrated "Other" # line 761 + sos.commit("fifth") # create new revision after integrating updates from second branch # line 762 + _.assertEqual(3, len(os.listdir(sos.revisionFolder(0, 4)))) # one new file from other branch + one new in current folder + meta file # line 763 + sos.switch("Other") # switch back to just integrated branch that tracks only "a*a" - shouldn't do anything # line 764 + _.assertTrue(os.path.exists("." + os.sep + "file1")) # line 765 + _.assertTrue(os.path.exists("." + os.sep + "a123a")) # line 766 + _.assertFalse(os.path.exists("." 
+ os.sep + "axxxa")) # because tracked in both branches, but not present in other -> delete in file tree # line 767 # TODO test switch --meta - def testLsTracked(_): # line 768 - sos.offline("test", options=["--track"]) # set up repo in tracking mode (SVN- or gitless-style) # line 769 - _.createFile(1) # line 770 - _.createFile("foo") # line 771 - sos.add(".", "./file*") # capture one file # line 772 - sos.ls() # line 773 - out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls()).replace("\r", ""), "\n") # line 774 - _.assertInAny("TRK file1 (file*)", out) # line 775 - _.assertNotInAny("... file1 (file*)", out) # line 776 - _.assertInAny(" foo", out) # line 777 - out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls(options=["--patterns"])).replace("\r", ""), "\n") # line 778 - _.assertInAny("TRK file*", out) # line 779 - _.createFile("a", prefix="sub") # line 780 - sos.add("sub", "sub/a") # line 781 - sos.ls("sub") # line 782 - _.assertIn("TRK a (a)", sos.safeSplit(wrapChannels(lambda _=None: sos.ls("sub")).replace("\r", ""), "\n")) # line 783 - - def testLineMerge(_): # line 785 - _.assertEqual("xabc", sos.lineMerge("xabc", "a bd")) # line 786 - _.assertEqual("xabxxc", sos.lineMerge("xabxxc", "a bd")) # line 787 - _.assertEqual("xa bdc", sos.lineMerge("xabc", "a bd", mergeOperation=sos.MergeOperation.INSERT)) # line 788 - _.assertEqual("ab", sos.lineMerge("xabc", "a bd", mergeOperation=sos.MergeOperation.REMOVE)) # line 789 - - def testCompression(_): # TODO test output ratio/advantage, also depending on compress flag set or not # line 791 - _.createFile(1) # line 792 - sos.offline("master", options=["--force"]) # line 793 - out = wrapChannels(lambda _=None: sos.changes(options=['--progress'])).replace("\r", "").split("\n") # line 794 - _.assertFalse(any(("Compression advantage" in line for line in out))) # simple mode should always print this to stdout # line 795 - _.assertTrue(_.existsFile(sos.revisionFolder(0, 0, 
file="b9ee10a87f612e299a6eb208210bc0898092a64c48091327cc2aaeee9b764ffa"), b"x" * 10)) # line 796 - setRepoFlag("compress", True) # was plain = uncompressed before # line 797 - _.createFile(2) # line 798 - out = wrapChannels(lambda _=None: sos.commit("Added file2", options=['--progress'])).replace("\r", "").split("\n") # line 799 - _.assertTrue(any(("Compression advantage" in line for line in out))) # line 800 - _.assertTrue(_.existsFile(sos.revisionFolder(0, 1, file="03b69bc801ae11f1ff2a71a50f165996d0ad681b4f822df13329a27e53f0fcd2"))) # exists # line 801 - _.assertFalse(_.existsFile(sos.revisionFolder(0, 1, file="03b69bc801ae11f1ff2a71a50f165996d0ad681b4f822df13329a27e53f0fcd2"), b"x" * 10)) # but is compressed instead # line 802 - - def testLocalConfig(_): # line 804 - sos.offline("bla", options=[]) # line 805 - try: # line 806 - sos.config(["set", "ignores", "one;two"], options=["--local"]) # line 806 - except SystemExit as E: # line 807 - _.assertEqual(0, E.code) # line 807 - _.assertTrue(checkRepoFlag("ignores", value=["one", "two"])) # line 808 - - def testConfigVariations(_): # line 810 - def makeRepo(): # line 811 - try: # line 812 - os.unlink("file1") # line 812 - except: # line 813 - pass # line 813 - sos.offline("master", options=["--force"]) # line 814 - _.createFile(1) # line 815 - sos.commit("Added file1") # line 816 - try: # line 817 - sos.config(["set", "strict", "on"]) # line 817 - except SystemExit as E: # line 818 - _.assertEqual(0, E.code) # line 818 - makeRepo() # line 819 - _.assertTrue(checkRepoFlag("strict", True)) # line 820 - try: # line 821 - sos.config(["set", "strict", "off"]) # line 821 - except SystemExit as E: # line 822 - _.assertEqual(0, E.code) # line 822 - makeRepo() # line 823 - _.assertTrue(checkRepoFlag("strict", False)) # line 824 - try: # line 825 - sos.config(["set", "strict", "yes"]) # line 825 - except SystemExit as E: # line 826 - _.assertEqual(0, E.code) # line 826 - makeRepo() # line 827 - 
_.assertTrue(checkRepoFlag("strict", True)) # line 828 - try: # line 829 - sos.config(["set", "strict", "no"]) # line 829 - except SystemExit as E: # line 830 - _.assertEqual(0, E.code) # line 830 - makeRepo() # line 831 - _.assertTrue(checkRepoFlag("strict", False)) # line 832 - try: # line 833 - sos.config(["set", "strict", "1"]) # line 833 - except SystemExit as E: # line 834 - _.assertEqual(0, E.code) # line 834 - makeRepo() # line 835 - _.assertTrue(checkRepoFlag("strict", True)) # line 836 - try: # line 837 - sos.config(["set", "strict", "0"]) # line 837 - except SystemExit as E: # line 838 - _.assertEqual(0, E.code) # line 838 - makeRepo() # line 839 - _.assertTrue(checkRepoFlag("strict", False)) # line 840 - try: # line 841 - sos.config(["set", "strict", "true"]) # line 841 - except SystemExit as E: # line 842 - _.assertEqual(0, E.code) # line 842 - makeRepo() # line 843 - _.assertTrue(checkRepoFlag("strict", True)) # line 844 - try: # line 845 - sos.config(["set", "strict", "false"]) # line 845 - except SystemExit as E: # line 846 - _.assertEqual(0, E.code) # line 846 - makeRepo() # line 847 - _.assertTrue(checkRepoFlag("strict", False)) # line 848 - try: # line 849 - sos.config(["set", "strict", "enable"]) # line 849 - except SystemExit as E: # line 850 - _.assertEqual(0, E.code) # line 850 - makeRepo() # line 851 - _.assertTrue(checkRepoFlag("strict", True)) # line 852 - try: # line 853 - sos.config(["set", "strict", "disable"]) # line 853 - except SystemExit as E: # line 854 - _.assertEqual(0, E.code) # line 854 - makeRepo() # line 855 - _.assertTrue(checkRepoFlag("strict", False)) # line 856 - try: # line 857 - sos.config(["set", "strict", "enabled"]) # line 857 - except SystemExit as E: # line 858 - _.assertEqual(0, E.code) # line 858 - makeRepo() # line 859 - _.assertTrue(checkRepoFlag("strict", True)) # line 860 - try: # line 861 - sos.config(["set", "strict", "disabled"]) # line 861 - except SystemExit as E: # line 862 - _.assertEqual(0, E.code) # 
line 862 - makeRepo() # line 863 - _.assertTrue(checkRepoFlag("strict", False)) # line 864 - try: # line 865 - sos.config(["set", "strict", "nope"]) # line 865 - _.fail() # line 865 - except SystemExit as E: # line 866 - _.assertEqual(1, E.code) # line 866 - - def testLsSimple(_): # line 868 - _.createFile(1) # line 869 - _.createFile("foo") # line 870 - _.createFile("ign1") # line 871 - _.createFile("ign2") # line 872 - _.createFile("bar", prefix="sub") # line 873 - sos.offline("test") # set up repo in tracking mode (SVN- or gitless-style) # line 874 - try: # define an ignore pattern # line 875 - sos.config(["set", "ignores", "ign1"]) # define an ignore pattern # line 875 - except SystemExit as E: # line 876 - _.assertEqual(0, E.code) # line 876 - try: # additional ignore pattern # line 877 - sos.config(["add", "ignores", "ign2"]) # additional ignore pattern # line 877 + def testLsTracked(_): # line 770 + sos.offline("test", options=["--track"]) # set up repo in tracking mode (SVN- or gitless-style) # line 771 + _.createFile(1) # line 772 + _.createFile("foo") # line 773 + sos.add(".", "./file*") # capture one file # line 774 + sos.ls() # line 775 + out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls()).replace("\r", ""), "\n") # line 776 + _.assertInAny("TRK file1 (file*)", out) # line 777 + _.assertNotInAny("... 
file1 (file*)", out) # line 778 + _.assertInAny(" foo", out) # line 779 + out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls(options=["--patterns"])).replace("\r", ""), "\n") # line 780 + _.assertInAny("TRK file*", out) # line 781 + _.createFile("a", prefix="sub") # line 782 + sos.add("sub", "sub/a") # line 783 + sos.ls("sub") # line 784 + _.assertIn("TRK a (a)", sos.safeSplit(wrapChannels(lambda _=None: sos.ls("sub")).replace("\r", ""), "\n")) # line 785 + + def testLineMerge(_): # line 787 + _.assertEqual("xabc", sos.lineMerge("xabc", "a bd")) # line 788 + _.assertEqual("xabxxc", sos.lineMerge("xabxxc", "a bd")) # line 789 + _.assertEqual("xa bdc", sos.lineMerge("xabc", "a bd", mergeOperation=sos.MergeOperation.INSERT)) # line 790 + _.assertEqual("ab", sos.lineMerge("xabc", "a bd", mergeOperation=sos.MergeOperation.REMOVE)) # line 791 + + def testCompression(_): # TODO test output ratio/advantage, also depending on compress flag set or not # line 793 + _.createFile(1) # line 794 + sos.offline("master", options=["--force"]) # line 795 + out = wrapChannels(lambda _=None: sos.changes(options=['--progress'])).replace("\r", "").split("\n") # line 796 + _.assertFalse(any(("Compression advantage" in line for line in out))) # simple mode should always print this to stdout # line 797 + _.assertTrue(_.existsFile(sos.revisionFolder(0, 0, file="b9ee10a87f612e299a6eb208210bc0898092a64c48091327cc2aaeee9b764ffa"), b"x" * 10)) # line 798 + setRepoFlag("compress", True) # was plain = uncompressed before # line 799 + _.createFile(2) # line 800 + out = wrapChannels(lambda _=None: sos.commit("Added file2", options=['--progress'])).replace("\r", "").split("\n") # line 801 + _.assertTrue(any(("Compression advantage" in line for line in out))) # line 802 + _.assertTrue(_.existsFile(sos.revisionFolder(0, 1, file="03b69bc801ae11f1ff2a71a50f165996d0ad681b4f822df13329a27e53f0fcd2"))) # exists # line 803 + _.assertFalse(_.existsFile(sos.revisionFolder(0, 1, 
file="03b69bc801ae11f1ff2a71a50f165996d0ad681b4f822df13329a27e53f0fcd2"), b"x" * 10)) # but is compressed instead # line 804 + + def testLocalConfig(_): # line 806 + sos.offline("bla", options=[]) # line 807 + try: # line 808 + sos.config(["set", "ignores", "one;two"], options=["--local"]) # line 808 + except SystemExit as E: # line 809 + _.assertEqual(0, E.code) # line 809 + _.assertTrue(checkRepoFlag("ignores", value=["one", "two"])) # line 810 + + def testConfigVariations(_): # line 812 + def makeRepo(): # line 813 + try: # line 814 + os.unlink("file1") # line 814 + except: # line 815 + pass # line 815 + sos.offline("master", options=["--force"]) # line 816 + _.createFile(1) # line 817 + sos.commit("Added file1") # line 818 + try: # line 819 + sos.config(["set", "strict", "on"]) # line 819 + except SystemExit as E: # line 820 + _.assertEqual(0, E.code) # line 820 + makeRepo() # line 821 + _.assertTrue(checkRepoFlag("strict", True)) # line 822 + try: # line 823 + sos.config(["set", "strict", "off"]) # line 823 + except SystemExit as E: # line 824 + _.assertEqual(0, E.code) # line 824 + makeRepo() # line 825 + _.assertTrue(checkRepoFlag("strict", False)) # line 826 + try: # line 827 + sos.config(["set", "strict", "yes"]) # line 827 + except SystemExit as E: # line 828 + _.assertEqual(0, E.code) # line 828 + makeRepo() # line 829 + _.assertTrue(checkRepoFlag("strict", True)) # line 830 + try: # line 831 + sos.config(["set", "strict", "no"]) # line 831 + except SystemExit as E: # line 832 + _.assertEqual(0, E.code) # line 832 + makeRepo() # line 833 + _.assertTrue(checkRepoFlag("strict", False)) # line 834 + try: # line 835 + sos.config(["set", "strict", "1"]) # line 835 + except SystemExit as E: # line 836 + _.assertEqual(0, E.code) # line 836 + makeRepo() # line 837 + _.assertTrue(checkRepoFlag("strict", True)) # line 838 + try: # line 839 + sos.config(["set", "strict", "0"]) # line 839 + except SystemExit as E: # line 840 + _.assertEqual(0, E.code) # line 840 + 
makeRepo() # line 841 + _.assertTrue(checkRepoFlag("strict", False)) # line 842 + try: # line 843 + sos.config(["set", "strict", "true"]) # line 843 + except SystemExit as E: # line 844 + _.assertEqual(0, E.code) # line 844 + makeRepo() # line 845 + _.assertTrue(checkRepoFlag("strict", True)) # line 846 + try: # line 847 + sos.config(["set", "strict", "false"]) # line 847 + except SystemExit as E: # line 848 + _.assertEqual(0, E.code) # line 848 + makeRepo() # line 849 + _.assertTrue(checkRepoFlag("strict", False)) # line 850 + try: # line 851 + sos.config(["set", "strict", "enable"]) # line 851 + except SystemExit as E: # line 852 + _.assertEqual(0, E.code) # line 852 + makeRepo() # line 853 + _.assertTrue(checkRepoFlag("strict", True)) # line 854 + try: # line 855 + sos.config(["set", "strict", "disable"]) # line 855 + except SystemExit as E: # line 856 + _.assertEqual(0, E.code) # line 856 + makeRepo() # line 857 + _.assertTrue(checkRepoFlag("strict", False)) # line 858 + try: # line 859 + sos.config(["set", "strict", "enabled"]) # line 859 + except SystemExit as E: # line 860 + _.assertEqual(0, E.code) # line 860 + makeRepo() # line 861 + _.assertTrue(checkRepoFlag("strict", True)) # line 862 + try: # line 863 + sos.config(["set", "strict", "disabled"]) # line 863 + except SystemExit as E: # line 864 + _.assertEqual(0, E.code) # line 864 + makeRepo() # line 865 + _.assertTrue(checkRepoFlag("strict", False)) # line 866 + try: # line 867 + sos.config(["set", "strict", "nope"]) # line 867 + _.fail() # line 867 + except SystemExit as E: # line 868 + _.assertEqual(1, E.code) # line 868 + + def testLsSimple(_): # line 870 + _.createFile(1) # line 871 + _.createFile("foo") # line 872 + _.createFile("ign1") # line 873 + _.createFile("ign2") # line 874 + _.createFile("bar", prefix="sub") # line 875 + sos.offline("test") # set up repo in tracking mode (SVN- or gitless-style) # line 876 + try: # define an ignore pattern # line 877 + sos.config(["set", "ignores", "ign1"]) 
# define an ignore pattern # line 877 except SystemExit as E: # line 878 _.assertEqual(0, E.code) # line 878 - try: # define a list of ignore patterns # line 879 - sos.config(["set", "ignoresWhitelist", "ign1;ign2"]) # define a list of ignore patterns # line 879 + try: # additional ignore pattern # line 879 + sos.config(["add", "ignores", "ign2"]) # additional ignore pattern # line 879 except SystemExit as E: # line 880 _.assertEqual(0, E.code) # line 880 - out = wrapChannels(lambda _=None: sos.config(["show"])).replace("\r", "") # line 881 - _.assertIn(" ignores [global] ['ign1', 'ign2']", out) # line 882 - out = wrapChannels(lambda _=None: sos.config(["show", "ignores"])).replace("\r", "") # line 883 + try: # define a list of ignore patterns # line 881 + sos.config(["set", "ignoresWhitelist", "ign1;ign2"]) # define a list of ignore patterns # line 881 + except SystemExit as E: # line 882 + _.assertEqual(0, E.code) # line 882 + out = wrapChannels(lambda _=None: sos.config(["show"])).replace("\r", "") # line 883 _.assertIn(" ignores [global] ['ign1', 'ign2']", out) # line 884 - out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls()).replace("\r", ""), "\n") # line 885 - _.assertInAny(' file1', out) # line 886 - _.assertInAny(' ign1', out) # line 887 - _.assertInAny(' ign2', out) # line 888 - _.assertNotIn('DIR sub', out) # line 889 - _.assertNotIn(' bar', out) # line 890 - out = wrapChannels(lambda _=None: sos.ls(options=["--recursive"])).replace("\r", "") # line 891 - _.assertIn('DIR sub', out) # line 892 - _.assertIn(' bar', out) # line 893 - try: # line 894 - sos.config(["rm", "foo", "bar"]) # line 894 - _.fail() # line 894 - except SystemExit as E: # line 895 - _.assertEqual(1, E.code) # line 895 + out = wrapChannels(lambda _=None: sos.config(["show", "ignores"])).replace("\r", "") # line 885 + _.assertIn(" ignores [global] ['ign1', 'ign2']", out) # line 886 + out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls()).replace("\r", ""), "\n") # line 887 + 
_.assertInAny(' file1', out) # line 888 + _.assertInAny(' ign1', out) # line 889 + _.assertInAny(' ign2', out) # line 890 + _.assertNotIn('DIR sub', out) # line 891 + _.assertNotIn(' bar', out) # line 892 + out = wrapChannels(lambda _=None: sos.ls(options=["--recursive"])).replace("\r", "") # line 893 + _.assertIn('DIR sub', out) # line 894 + _.assertIn(' bar', out) # line 895 try: # line 896 - sos.config(["rm", "ignores", "foo"]) # line 896 + sos.config(["rm", "foo", "bar"]) # line 896 _.fail() # line 896 except SystemExit as E: # line 897 _.assertEqual(1, E.code) # line 897 try: # line 898 - sos.config(["rm", "ignores", "ign1"]) # line 898 + sos.config(["rm", "ignores", "foo"]) # line 898 + _.fail() # line 898 except SystemExit as E: # line 899 - _.assertEqual(0, E.code) # line 899 - try: # remove ignore pattern # line 900 - sos.config(["unset", "ignoresWhitelist"]) # remove ignore pattern # line 900 + _.assertEqual(1, E.code) # line 899 + try: # line 900 + sos.config(["rm", "ignores", "ign1"]) # line 900 except SystemExit as E: # line 901 _.assertEqual(0, E.code) # line 901 - out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls()).replace("\r", ""), "\n") # line 902 - _.assertInAny(' ign1', out) # line 903 - _.assertInAny('IGN ign2', out) # line 904 - _.assertNotInAny(' ign2', out) # line 905 - - def testWhitelist(_): # line 907 + try: # remove ignore pattern # line 902 + sos.config(["unset", "ignoresWhitelist"]) # remove ignore pattern # line 902 + except SystemExit as E: # line 903 + _.assertEqual(0, E.code) # line 903 + out = sos.safeSplit(wrapChannels(lambda _=None: sos.ls()).replace("\r", ""), "\n") # line 904 + _.assertInAny(' ign1', out) # line 905 + _.assertInAny('IGN ign2', out) # line 906 + _.assertNotInAny(' ign2', out) # line 907 + + def testWhitelist(_): # line 909 # TODO test same for simple mode - _.createFile(1) # line 909 - sos.defaults.ignores[:] = ["file*"] # replace in-place # line 910 - sos.offline("xx", options=["--track", "--strict"]) # 
because nothing to commit due to ignore pattern # line 911 - sos.add(".", "./file*") # add tracking pattern for "file1" # line 912 - sos.commit(options=["--force"]) # attempt to commit the file # line 913 - _.assertEqual(1, len(os.listdir(sos.revisionFolder(0, 1)))) # only meta data, file1 was ignored # line 914 - try: # Exit because dirty # line 915 - sos.online() # Exit because dirty # line 915 - _.fail() # Exit because dirty # line 915 - except: # exception expected # line 916 - pass # exception expected # line 916 - _.createFile("x2") # add another change # line 917 - sos.add(".", "./x?") # add tracking pattern for "file1" # line 918 - try: # force beyond dirty flag check # line 919 - sos.online(["--force"]) # force beyond dirty flag check # line 919 - _.fail() # force beyond dirty flag check # line 919 - except: # line 920 - pass # line 920 - sos.online(["--force", "--force"]) # force beyond file tree modifications check # line 921 - _.assertFalse(os.path.exists(sos.metaFolder)) # line 922 - - _.createFile(1) # line 924 - sos.defaults.ignoresWhitelist[:] = ["file*"] # line 925 - sos.offline("xx", None, ["--track"]) # line 926 - sos.add(".", "./file*") # line 927 - sos.commit() # should NOT ask for force here # line 928 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # meta data and "file1", file1 was whitelisted # line 929 - - def testRemove(_): # line 931 - _.createFile(1, "x" * 100) # line 932 - sos.offline("trunk") # line 933 - try: # line 934 - sos.destroy("trunk") # line 934 - _fail() # line 934 - except: # line 935 - pass # line 935 - _.createFile(2, "y" * 10) # line 936 - sos.branch("added") # creates new branch, writes repo metadata, and therefore creates backup copy # line 937 - sos.destroy("trunk") # line 938 - _.assertAllIn([sos.metaFile, sos.metaBack, "b0_last", "b1"], os.listdir("." + os.sep + sos.metaFolder)) # line 939 - _.assertTrue(os.path.exists("." 
+ os.sep + sos.metaFolder + os.sep + "b1")) # line 940 - _.assertFalse(os.path.exists("." + os.sep + sos.metaFolder + os.sep + "b0")) # line 941 - sos.branch("next") # line 942 - _.createFile(3, "y" * 10) # make a change # line 943 - sos.destroy("added", "--force") # should succeed # line 944 - - def testUsage(_): # line 946 - try: # TODO expect sys.exit(0) # line 947 - sos.usage() # TODO expect sys.exit(0) # line 947 - _.fail() # TODO expect sys.exit(0) # line 947 - except: # line 948 - pass # line 948 - try: # TODO expect sys.exit(0) # line 949 - sos.usage("help") # TODO expect sys.exit(0) # line 949 - _.fail() # TODO expect sys.exit(0) # line 949 - except: # line 950 - pass # line 950 - try: # TODO expect sys.exit(0) # line 951 - sos.usage("help", verbose=True) # TODO expect sys.exit(0) # line 951 - _.fail() # TODO expect sys.exit(0) # line 951 - except: # line 952 - pass # line 952 - try: # line 953 - sos.usage(version=True) # line 953 - _.fail() # line 953 - except: # line 954 - pass # line 954 - try: # line 955 - sos.usage(version=True) # line 955 - _.fail() # line 955 - except: # line 956 - pass # line 956 - - def testOnlyExcept(_): # line 958 - ''' Test blacklist glob rules. 
''' # line 959 - sos.offline(options=["--track"]) # line 960 - _.createFile("a.1") # line 961 - _.createFile("a.2") # line 962 - _.createFile("b.1") # line 963 - _.createFile("b.2") # line 964 - sos.add(".", "./a.?") # line 965 - sos.add(".", "./?.1", negative=True) # line 966 - out = wrapChannels(lambda _=None: sos.commit()) # line 967 - _.assertIn("ADD ./a.2", out) # line 968 - _.assertNotIn("ADD ./a.1", out) # line 969 - _.assertNotIn("ADD ./b.1", out) # line 970 - _.assertNotIn("ADD ./b.2", out) # line 971 - - def testOnly(_): # line 973 - _.assertEqual((_coconut.frozenset(("./A", "x/B")), _coconut.frozenset(("./C",))), sos.parseOnlyOptions(".", ["abc", "def", "--only", "A", "--x", "--only", "x/B", "--except", "C", "--only"])) # line 974 - _.assertEqual(_coconut.frozenset(("B",)), sos.conditionalIntersection(_coconut.frozenset(("A", "B", "C")), _coconut.frozenset(("B", "D")))) # line 975 - _.assertEqual(_coconut.frozenset(("B", "D")), sos.conditionalIntersection(_coconut.frozenset(), _coconut.frozenset(("B", "D")))) # line 976 - _.assertEqual(_coconut.frozenset(("B", "D")), sos.conditionalIntersection(None, _coconut.frozenset(("B", "D")))) # line 977 - sos.offline(options=["--track", "--strict"]) # line 978 - _.createFile(1) # line 979 - _.createFile(2) # line 980 - sos.add(".", "./file1") # line 981 - sos.add(".", "./file2") # line 982 - sos.commit(onlys=_coconut.frozenset(("./file1",))) # line 983 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # only meta and file1 # line 984 - sos.commit() # adds also file2 # line 985 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # only meta and file1 # line 986 - _.createFile(1, "cc") # modify both files # line 987 - _.createFile(2, "dd") # line 988 - try: # line 989 - sos.config(["set", "texttype", "file2"]) # line 989 - except SystemExit as E: # line 990 - _.assertEqual(0, E.code) # line 990 - changes = sos.changes(excps=_coconut.frozenset(("./file1",))) # line 991 - _.assertEqual(1, 
len(changes.modifications)) # only file2 # line 992 - _.assertTrue("./file2" in changes.modifications) # line 993 - _.assertAllIn(["DIF ./file2", ""], wrapChannels(lambda _=None: sos.diff(onlys=_coconut.frozenset(("./file2",))))) # line 994 - _.assertAllNotIn(["MOD ./file1", "DIF ./file1", "MOD ./file2"], wrapChannels(lambda _=None: sos.diff(onlys=_coconut.frozenset(("./file2",))))) # MOD vs. DIF # line 995 - _.assertIn("MOD ./file1", wrapChannels(lambda _=None: sos.diff(excps=_coconut.frozenset(("./file2",))))) # MOD vs. DIF # line 996 - _.assertNotIn("MOD ./file2", wrapChannels(lambda _=None: sos.diff(excps=_coconut.frozenset(("./file2",))))) # line 997 - - def testDiff(_): # line 999 - try: # manually mark this file as "textual" # line 1000 - sos.config(["set", "texttype", "file1"]) # manually mark this file as "textual" # line 1000 - except SystemExit as E: # line 1001 - _.assertEqual(0, E.code) # line 1001 - sos.offline(options=["--strict"]) # line 1002 - _.createFile(1) # line 1003 - _.createFile(2) # line 1004 - sos.commit() # line 1005 - _.createFile(1, "sdfsdgfsdf") # line 1006 - _.createFile(2, "12343") # line 1007 - sos.commit() # line 1008 - _.createFile(1, "foobar") # line 1009 - _.createFile(3) # line 1010 - out = wrapChannels(lambda _=None: sos.diff("/-2")) # compare with r1 (second counting from last which is r2) # line 1011 - _.assertIn("ADD ./file3", out) # line 1012 - _.assertAllIn(["MOD ./file2", "DIF ./file1 ", "-~- 0 |xxxxxxxxxx|", "+~+ 0 |foobar|"], out) # line 1013 - _.assertAllNotIn(["MOD ./file1", "DIF ./file1"], wrapChannels(lambda _=None: sos.diff("/-2", onlys=_coconut.frozenset(("./file2",))))) # line 1014 - - def testReorderRenameActions(_): # line 1016 - result = sos.reorderRenameActions([("123", "312"), ("312", "132"), ("321", "123")], exitOnConflict=False) # type: Tuple[str, str] # line 1017 - _.assertEqual([("312", "132"), ("123", "312"), ("321", "123")], result) # line 1018 - try: # line 1019 - sos.reorderRenameActions([("123", 
"312"), ("312", "123")], exitOnConflict=True) # line 1019 - _.fail() # line 1019 - except: # line 1020 - pass # line 1020 - - def testPublish(_): # line 1022 - pass # TODO how to test without modifying anything underlying? probably use --test flag or similar? # line 1023 - - def testMove(_): # line 1025 - sos.offline(options=["--strict", "--track"]) # line 1026 - _.createFile(1) # line 1027 - sos.add(".", "./file?") # line 1028 + _.createFile(1) # line 911 + sos.defaults.ignores[:] = ["file*"] # replace in-place # line 912 + sos.offline("xx", options=["--track", "--strict"]) # because nothing to commit due to ignore pattern # line 913 + sos.add(".", "./file*") # add tracking pattern for "file1" # line 914 + sos.commit(options=["--force"]) # attempt to commit the file # line 915 + _.assertEqual(1, len(os.listdir(sos.revisionFolder(0, 1)))) # only meta data, file1 was ignored # line 916 + try: # Exit because dirty # line 917 + sos.online() # Exit because dirty # line 917 + _.fail() # Exit because dirty # line 917 + except: # exception expected # line 918 + pass # exception expected # line 918 + _.createFile("x2") # add another change # line 919 + sos.add(".", "./x?") # add tracking pattern for "file1" # line 920 + try: # force beyond dirty flag check # line 921 + sos.online(["--force"]) # force beyond dirty flag check # line 921 + _.fail() # force beyond dirty flag check # line 921 + except: # line 922 + pass # line 922 + sos.online(["--force", "--force"]) # force beyond file tree modifications check # line 923 + _.assertFalse(os.path.exists(sos.metaFolder)) # line 924 + + _.createFile(1) # line 926 + sos.defaults.ignoresWhitelist[:] = ["file*"] # line 927 + sos.offline("xx", None, ["--track"]) # line 928 + sos.add(".", "./file*") # line 929 + sos.commit() # should NOT ask for force here # line 930 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # meta data and "file1", file1 was whitelisted # line 931 + + def testRemove(_): # line 933 + _.createFile(1, 
"x" * 100) # line 934 + sos.offline("trunk") # line 935 + try: # line 936 + sos.destroy("trunk") # line 936 + _fail() # line 936 + except: # line 937 + pass # line 937 + _.createFile(2, "y" * 10) # line 938 + sos.branch("added") # creates new branch, writes repo metadata, and therefore creates backup copy # line 939 + sos.destroy("trunk") # line 940 + _.assertAllIn([sos.metaFile, sos.metaBack, "b0_last", "b1"], os.listdir("." + os.sep + sos.metaFolder)) # line 941 + _.assertTrue(os.path.exists("." + os.sep + sos.metaFolder + os.sep + "b1")) # line 942 + _.assertFalse(os.path.exists("." + os.sep + sos.metaFolder + os.sep + "b0")) # line 943 + sos.branch("next") # line 944 + _.createFile(3, "y" * 10) # make a change # line 945 + sos.destroy("added", "--force") # should succeed # line 946 + + def testFastBranchingOnEmptyHistory(_): # line 948 + ''' Test fast branching without revisions and with them. ''' # line 949 + sos.offline(options=["--strict", "--compress"]) # b0 # line 950 + sos.branch("", "", options=["--fast", "--last"]) # b1 # line 951 + sos.branch("", "", options=["--fast", "--last"]) # b2 # line 952 + sos.branch("", "", options=["--fast", "--last"]) # b3 # line 953 + sos.destroy("2") # line 954 + out = wrapChannels(lambda _=None: sos.status()).replace("\r", "") # line 955 + _.assertIn("b0 'trunk' @", out) # line 956 + _.assertIn("b1 @", out) # line 957 + _.assertIn("b3 @", out) # line 958 + _.assertNotIn("b2 @", out) # line 959 + sos.branch("", "") # non-fast branching of b4 # line 960 + _.createFile(1) # line 961 + _.createFile(2) # line 962 + sos.commit("") # line 963 + sos.branch("", "", options=["--fast", "--last"]) # b5 # line 964 + sos.destroy("4") # line 965 + out = wrapChannels(lambda _=None: sos.status()).replace("\r", "") # line 966 + _.assertIn("b0 'trunk' @", out) # line 967 + _.assertIn("b1 @", out) # line 968 + _.assertIn("b3 @", out) # line 969 + _.assertIn("b5 @", out) # line 970 + _.assertNotIn("b2 @", out) # line 971 + _.assertNotIn("b4 
@", out) # line 972 +# TODO add more files and branch again + + def testUsage(_): # line 975 + try: # TODO expect sys.exit(0) # line 976 + sos.usage() # TODO expect sys.exit(0) # line 976 + _.fail() # TODO expect sys.exit(0) # line 976 + except: # line 977 + pass # line 977 + try: # TODO expect sys.exit(0) # line 978 + sos.usage("help") # TODO expect sys.exit(0) # line 978 + _.fail() # TODO expect sys.exit(0) # line 978 + except: # line 979 + pass # line 979 + try: # TODO expect sys.exit(0) # line 980 + sos.usage("help", verbose=True) # TODO expect sys.exit(0) # line 980 + _.fail() # TODO expect sys.exit(0) # line 980 + except: # line 981 + pass # line 981 + try: # line 982 + sos.usage(version=True) # line 982 + _.fail() # line 982 + except: # line 983 + pass # line 983 + try: # line 984 + sos.usage(version=True) # line 984 + _.fail() # line 984 + except: # line 985 + pass # line 985 + + def testOnlyExcept(_): # line 987 + ''' Test blacklist glob rules. ''' # line 988 + sos.offline(options=["--track"]) # line 989 + _.createFile("a.1") # line 990 + _.createFile("a.2") # line 991 + _.createFile("b.1") # line 992 + _.createFile("b.2") # line 993 + sos.add(".", "./a.?") # line 994 + sos.add(".", "./?.1", negative=True) # line 995 + out = wrapChannels(lambda _=None: sos.commit()) # line 996 + _.assertIn("ADD ./a.2", out) # line 997 + _.assertNotIn("ADD ./a.1", out) # line 998 + _.assertNotIn("ADD ./b.1", out) # line 999 + _.assertNotIn("ADD ./b.2", out) # line 1000 + + def testOnly(_): # line 1002 + _.assertEqual((_coconut.frozenset(("./A", "x/B")), _coconut.frozenset(("./C",))), sos.parseOnlyOptions(".", ["abc", "def", "--only", "A", "--x", "--only", "x/B", "--except", "C", "--only"])) # line 1003 + _.assertEqual(_coconut.frozenset(("B",)), sos.conditionalIntersection(_coconut.frozenset(("A", "B", "C")), _coconut.frozenset(("B", "D")))) # line 1004 + _.assertEqual(_coconut.frozenset(("B", "D")), sos.conditionalIntersection(_coconut.frozenset(), _coconut.frozenset(("B", 
"D")))) # line 1005 + _.assertEqual(_coconut.frozenset(("B", "D")), sos.conditionalIntersection(None, _coconut.frozenset(("B", "D")))) # line 1006 + sos.offline(options=["--track", "--strict"]) # line 1007 + _.createFile(1) # line 1008 + _.createFile(2) # line 1009 + sos.add(".", "./file1") # line 1010 + sos.add(".", "./file2") # line 1011 + sos.commit(onlys=_coconut.frozenset(("./file1",))) # line 1012 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # only meta and file1 # line 1013 + sos.commit() # adds also file2 # line 1014 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 2)))) # only meta and file1 # line 1015 + _.createFile(1, "cc") # modify both files # line 1016 + _.createFile(2, "dd") # line 1017 + try: # line 1018 + sos.config(["set", "texttype", "file2"]) # line 1018 + except SystemExit as E: # line 1019 + _.assertEqual(0, E.code) # line 1019 + changes = sos.changes(excps=_coconut.frozenset(("./file1",))) # line 1020 + _.assertEqual(1, len(changes.modifications)) # only file2 # line 1021 + _.assertTrue("./file2" in changes.modifications) # line 1022 + _.assertAllIn(["DIF ./file2", ""], wrapChannels(lambda _=None: sos.diff(onlys=_coconut.frozenset(("./file2",))))) # line 1023 + _.assertAllNotIn(["MOD ./file1", "DIF ./file1", "MOD ./file2"], wrapChannels(lambda _=None: sos.diff(onlys=_coconut.frozenset(("./file2",))))) # MOD vs. DIF # line 1024 + _.assertIn("MOD ./file1", wrapChannels(lambda _=None: sos.diff(excps=_coconut.frozenset(("./file2",))))) # MOD vs. 
DIF # line 1025 + _.assertNotIn("MOD ./file2", wrapChannels(lambda _=None: sos.diff(excps=_coconut.frozenset(("./file2",))))) # line 1026 + + def testDiff(_): # line 1028 + try: # manually mark this file as "textual" # line 1029 + sos.config(["set", "texttype", "file1"]) # manually mark this file as "textual" # line 1029 + except SystemExit as E: # line 1030 + _.assertEqual(0, E.code) # line 1030 + sos.offline(options=["--strict"]) # line 1031 + _.createFile(1) # line 1032 + _.createFile(2) # line 1033 + sos.commit() # line 1034 + _.createFile(1, "sdfsdgfsdf") # line 1035 + _.createFile(2, "12343") # line 1036 + sos.commit() # line 1037 + _.createFile(1, "foobar") # line 1038 + _.createFile(3) # line 1039 + out = wrapChannels(lambda _=None: sos.diff("/-2")) # compare with r1 (second counting from last which is r2) # line 1040 + _.assertIn("ADD ./file3", out) # line 1041 + _.assertAllIn(["MOD ./file2", "DIF ./file1 ", "-~- 0 |xxxxxxxxxx|", "+~+ 0 |foobar|"], out) # line 1042 + _.assertAllNotIn(["MOD ./file1", "DIF ./file1"], wrapChannels(lambda _=None: sos.diff("/-2", onlys=_coconut.frozenset(("./file2",))))) # line 1043 + + def testReorderRenameActions(_): # line 1045 + result = sos.reorderRenameActions([("123", "312"), ("312", "132"), ("321", "123")], exitOnConflict=False) # type: Tuple[str, str] # line 1046 + _.assertEqual([("312", "132"), ("123", "312"), ("321", "123")], result) # line 1047 + try: # line 1048 + sos.reorderRenameActions([("123", "312"), ("312", "123")], exitOnConflict=True) # line 1048 + _.fail() # line 1048 + except: # line 1049 + pass # line 1049 + + def testPublish(_): # line 1051 + pass # TODO how to test without modifying anything underlying? probably use --test flag or similar? 
# line 1052 + + def testMove(_): # line 1054 + sos.offline(options=["--strict", "--track"]) # line 1055 + _.createFile(1) # line 1056 + sos.add(".", "./file?") # line 1057 # test source folder missing - try: # line 1030 - sos.move("sub", "sub/file?", ".", "?file") # line 1030 - _.fail() # line 1030 - except: # line 1031 - pass # line 1031 + try: # line 1059 + sos.move("sub", "sub/file?", ".", "?file") # line 1059 + _.fail() # line 1059 + except: # line 1060 + pass # line 1060 # test target folder missing: create it - sos.move(".", "./file?", "sub", "sub/file?") # line 1033 - _.assertTrue(os.path.exists("sub")) # line 1034 - _.assertTrue(os.path.exists("sub/file1")) # line 1035 - _.assertFalse(os.path.exists("file1")) # line 1036 + sos.move(".", "./file?", "sub", "sub/file?") # line 1062 + _.assertTrue(os.path.exists("sub")) # line 1063 + _.assertTrue(os.path.exists("sub/file1")) # line 1064 + _.assertFalse(os.path.exists("file1")) # line 1065 # test move - sos.move("sub", "sub/file?", ".", "./?file") # line 1038 - _.assertTrue(os.path.exists("1file")) # line 1039 - _.assertFalse(os.path.exists("sub/file1")) # line 1040 + sos.move("sub", "sub/file?", ".", "./?file") # line 1067 + _.assertTrue(os.path.exists("1file")) # line 1068 + _.assertFalse(os.path.exists("sub/file1")) # line 1069 # test nothing matches source pattern - try: # line 1042 - sos.move(".", "a*", ".", "b*") # line 1042 - _.fail() # line 1042 - except: # line 1043 - pass # line 1043 - sos.add(".", "*") # anything pattern # line 1044 - try: # TODO check that alternative pattern "*" was suggested (1 hit) # line 1045 - sos.move(".", "a*", ".", "b*") # TODO check that alternative pattern "*" was suggested (1 hit) # line 1045 - _.fail() # TODO check that alternative pattern "*" was suggested (1 hit) # line 1045 - except: # line 1046 - pass # line 1046 + try: # line 1071 + sos.move(".", "a*", ".", "b*") # line 1071 + _.fail() # line 1071 + except: # line 1072 + pass # line 1072 + sos.add(".", "*") # 
anything pattern # line 1073 + try: # TODO check that alternative pattern "*" was suggested (1 hit) # line 1074 + sos.move(".", "a*", ".", "b*") # TODO check that alternative pattern "*" was suggested (1 hit) # line 1074 + _.fail() # TODO check that alternative pattern "*" was suggested (1 hit) # line 1074 + except: # line 1075 + pass # line 1075 # test rename no conflict - _.createFile(1) # line 1048 - _.createFile(2) # line 1049 - _.createFile(3) # line 1050 - sos.add(".", "./file*") # line 1051 - try: # define an ignore pattern # line 1052 - sos.config(["set", "ignores", "file3;file4"]) # define an ignore pattern # line 1052 - except SystemExit as E: # line 1053 - _.assertEqual(0, E.code) # line 1053 - try: # line 1054 - sos.config(["set", "ignoresWhitelist", "file3"]) # line 1054 - except SystemExit as E: # line 1055 - _.assertEqual(0, E.code) # line 1055 - sos.move(".", "./file*", ".", "fi*le") # line 1056 - _.assertTrue(all((os.path.exists("fi%dle" % i) for i in range(1, 4)))) # line 1057 - _.assertFalse(os.path.exists("fi4le")) # line 1058 + _.createFile(1) # line 1077 + _.createFile(2) # line 1078 + _.createFile(3) # line 1079 + sos.add(".", "./file*") # line 1080 + try: # define an ignore pattern # line 1081 + sos.config(["set", "ignores", "file3;file4"]) # define an ignore pattern # line 1081 + except SystemExit as E: # line 1082 + _.assertEqual(0, E.code) # line 1082 + try: # line 1083 + sos.config(["set", "ignoresWhitelist", "file3"]) # line 1083 + except SystemExit as E: # line 1084 + _.assertEqual(0, E.code) # line 1084 + sos.move(".", "./file*", ".", "fi*le") # line 1085 + _.assertTrue(all((os.path.exists("fi%dle" % i) for i in range(1, 4)))) # line 1086 + _.assertFalse(os.path.exists("fi4le")) # line 1087 # test rename solvable conflicts - [_.createFile("%s-%s-%s" % tuple((c for c in n))) for n in ["312", "321", "123", "231"]] # line 1060 + [_.createFile("%s-%s-%s" % tuple((c for c in n))) for n in ["312", "321", "123", "231"]] # line 1089 # 
sos.move("?-?-?") # test rename unsolvable conflicts # test --soft option - sos.remove(".", "./?file") # was renamed before # line 1064 - sos.add(".", "./?a?b", ["--force"]) # line 1065 - sos.move(".", "./?a?b", ".", "./a?b?", ["--force", "--soft"]) # line 1066 - _.createFile("1a2b") # should not be tracked # line 1067 - _.createFile("a1b2") # should be tracked # line 1068 - sos.commit() # line 1069 - _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # line 1070 - _.assertTrue(os.path.exists(sos.revisionFolder(0, 1, file="93b38f90892eb5c57779ca9c0b6fbdf6774daeee3342f56f3e78eb2fe5336c50"))) # a1b2 # line 1071 - _.createFile("1a1b1") # line 1072 - _.createFile("1a1b2") # line 1073 - sos.add(".", "?a?b*") # line 1074 - _.assertIn("not unique", wrapChannels(lambda _=None: sos.move(".", "?a?b*", ".", "z?z?"))) # should raise error due to same target name # line 1075 + sos.remove(".", "./?file") # was renamed before # line 1093 + sos.add(".", "./?a?b", ["--force"]) # line 1094 + sos.move(".", "./?a?b", ".", "./a?b?", ["--force", "--soft"]) # line 1095 + _.createFile("1a2b") # should not be tracked # line 1096 + _.createFile("a1b2") # should be tracked # line 1097 + sos.commit() # line 1098 + _.assertEqual(2, len(os.listdir(sos.revisionFolder(0, 1)))) # line 1099 + _.assertTrue(os.path.exists(sos.revisionFolder(0, 1, file="93b38f90892eb5c57779ca9c0b6fbdf6774daeee3342f56f3e78eb2fe5336c50"))) # a1b2 # line 1100 + _.createFile("1a1b1") # line 1101 + _.createFile("1a1b2") # line 1102 + sos.add(".", "?a?b*") # line 1103 + _.assertIn("not unique", wrapChannels(lambda _=None: sos.move(".", "?a?b*", ".", "z?z?"))) # should raise error due to same target name # line 1104 # TODO only rename if actually any files are versioned? or simply what is alife? 
# TODO add test if two single question marks will be moved into adjacent characters - def testAskUpdate(_): # line 1079 - _.createFile(1) # line 1080 - _.createFile(3) # line 1081 - _.createFile(5) # line 1082 - sos.offline() # branch 0: only file1 # line 1083 - sos.branch() # line 1084 - os.unlink("file1") # line 1085 - os.unlink("file3") # line 1086 - os.unlink("file5") # line 1087 - _.createFile(2) # line 1088 - _.createFile(4) # line 1089 - _.createFile(6) # line 1090 - sos.commit() # branch 1: only file2 # line 1091 - sos.switch("0/") # line 1092 - mockInput(["y", "a", "y", "a"], lambda _=None: sos.update("1/", ["--ask"])) # line 1093 - _.assertFalse(_.existsFile(1)) # line 1094 - _.assertFalse(_.existsFile(3)) # line 1095 - _.assertFalse(_.existsFile(5)) # line 1096 - _.assertTrue(_.existsFile(2)) # line 1097 - _.assertTrue(_.existsFile(4)) # line 1098 - _.assertTrue(_.existsFile(6)) # line 1099 - - def testHashCollision(_): # line 1101 - sos.offline() # line 1102 - _.createFile(1) # line 1103 - os.mkdir(sos.revisionFolder(0, 1)) # line 1104 - _.createFile("b9ee10a87f612e299a6eb208210bc0898092a64c48091327cc2aaeee9b764ffa", prefix=sos.revisionFolder(0, 1)) # line 1105 - _.createFile(1) # line 1106 - try: # should exit with error due to collision detection # line 1107 - sos.commit() # should exit with error due to collision detection # line 1107 - _.fail() # should exit with error due to collision detection # line 1107 - except SystemExit as E: # TODO will capture exit(0) which is wrong, change to check code in all places # line 1108 - _.assertEqual(1, E.code) # TODO will capture exit(0) which is wrong, change to check code in all places # line 1108 - - def testFindBase(_): # line 1110 - old = os.getcwd() # line 1111 - try: # line 1112 - os.mkdir("." + os.sep + ".git") # line 1113 - os.makedirs("." + os.sep + "a" + os.sep + sos.metaFolder) # line 1114 - os.makedirs("." 
+ os.sep + "a" + os.sep + "b") # line 1115 - os.chdir("a" + os.sep + "b") # line 1116 - s, vcs, cmd = sos.findSosVcsBase() # line 1117 - _.assertIsNotNone(s) # line 1118 - _.assertIsNotNone(vcs) # line 1119 - _.assertEqual("git", cmd) # line 1120 - finally: # line 1121 - os.chdir(old) # line 1121 + def testAskUpdate(_): # line 1108 + _.createFile(1) # line 1109 + _.createFile(3) # line 1110 + _.createFile(5) # line 1111 + sos.offline() # branch 0: only file1 # line 1112 + sos.branch() # line 1113 + os.unlink("file1") # line 1114 + os.unlink("file3") # line 1115 + os.unlink("file5") # line 1116 + _.createFile(2) # line 1117 + _.createFile(4) # line 1118 + _.createFile(6) # line 1119 + sos.commit() # branch 1: only file2 # line 1120 + sos.switch("0/") # line 1121 + mockInput(["y", "a", "y", "a"], lambda _=None: sos.update("1/", ["--ask"])) # line 1122 + _.assertFalse(_.existsFile(1)) # line 1123 + _.assertFalse(_.existsFile(3)) # line 1124 + _.assertFalse(_.existsFile(5)) # line 1125 + _.assertTrue(_.existsFile(2)) # line 1126 + _.assertTrue(_.existsFile(4)) # line 1127 + _.assertTrue(_.existsFile(6)) # line 1128 + + def testHashCollision(_): # line 1130 + sos.offline() # line 1131 + _.createFile(1) # line 1132 + os.mkdir(sos.revisionFolder(0, 1)) # line 1133 + _.createFile("b9ee10a87f612e299a6eb208210bc0898092a64c48091327cc2aaeee9b764ffa", prefix=sos.revisionFolder(0, 1)) # line 1134 + _.createFile(1) # line 1135 + try: # should exit with error due to collision detection # line 1136 + sos.commit() # should exit with error due to collision detection # line 1136 + _.fail() # should exit with error due to collision detection # line 1136 + except SystemExit as E: # TODO will capture exit(0) which is wrong, change to check code in all places # line 1137 + _.assertEqual(1, E.code) # TODO will capture exit(0) which is wrong, change to check code in all places # line 1137 + + def testFindBase(_): # line 1139 + old = os.getcwd() # line 1140 + try: # line 1141 + os.mkdir("." 
+ os.sep + ".git") # line 1142 + os.makedirs("." + os.sep + "a" + os.sep + sos.metaFolder) # line 1143 + os.makedirs("." + os.sep + "a" + os.sep + "b") # line 1144 + os.chdir("a" + os.sep + "b") # line 1145 + s, vcs, cmd = sos.findSosVcsBase() # line 1146 + _.assertIsNotNone(s) # line 1147 + _.assertIsNotNone(vcs) # line 1148 + _.assertEqual("git", cmd) # line 1149 + finally: # line 1150 + os.chdir(old) # line 1150 # TODO test command line operation --sos vs. --vcs # check exact output instead of only expected exception/fail @@ -1337,6 +1367,7 @@ def testFindBase(_): # line 1110 # TODO tests for loadcommit redirection # TODO test wrong branch/revision after fast branching, would raise exception for -1 otherwise -if __name__ == '__main__': # line 1131 - logging.basicConfig(level=logging.DEBUG, stream=sys.stderr, format="%(asctime)-23s %(levelname)-8s %(name)s:%(lineno)d | %(message)s" if '--log' in sys.argv else "%(message)s") # line 1132 - unittest.main(testRunner=debugTestRunner() if '-v' in sys.argv and not os.getenv("CI", "false").lower() == "true" else None) # warnings = "ignore") # line 1133 + +if __name__ == '__main__': # line 1161 + logging.basicConfig(level=logging.DEBUG, stream=sys.stderr, format="%(asctime)-23s %(levelname)-8s %(name)s:%(lineno)d | %(message)s" if '--log' in sys.argv else "%(message)s") # line 1162 + unittest.main(testRunner=debugTestRunner() if '-v' in sys.argv and not os.getenv("CI", "false").lower() == "true" else None) # warnings = "ignore") # line 1163 diff --git a/sos/usage.coco b/sos/usage.coco index 18dbb62..d5da750 100644 --- a/sos/usage.coco +++ b/sos/usage.coco @@ -352,7 +352,10 @@ OPTIONS:Dict[str,Dict[str?,str]] = { # option -> command (or None = for all) -> "all": { "ls": "Recursively list all files, starting from repository root", "log": """Show all commits since creation of the branch. 
- Default is only showing the last "logLines" entries""" + Default is only showing the last "logLines" entries""", + "publish": """Commit all files present at offline time, instead of only modifications thereafter. + When going offline with SOS on an underlying VCS checkout with modifications, use this option. + Otherwise - underlying VCS checkout was clean when going offline with SOS - avoid this option.""" }, "a": { "ls": "Recursively list all files, starting from repository root" diff --git a/sos/usage.py b/sos/usage.py index 7b40759..5271c25 100644 --- a/sos/usage.py +++ b/sos/usage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# __coconut_hash__ = 0x351ecac4 +# __coconut_hash__ = 0x5c1c0810 # Compiled with Coconut version 1.3.1-post_dev28 [Dead Parrot] @@ -104,77 +104,79 @@ def __eq__(self, other): # line 38 Cannot be changed via user interface after repository creation. Most commands, however, support a "--strict" option nevertheless"""}, "force": {None: """Executes potentially harmful operations, telling SOS that you really intend to perform that command. 
Most commands: Ignore uncommitted branches, continue to remove SOS repository metadata folders """, "offline": """If already in offline mode, remove offline repository first before creating empty offline repository anew""", "online": """Ignore uncommitted branches, continue to remove SOS repository metadata folder""", "destroy": """Ignore dirty branches (those with changes not committed back to the underlying VCS) and continue with branch destruction""", "switch": """Override safety check to break switching when file tree contains modifications"""}, "full": {"dump": """Force a full repository dump instead of a differential export"""}, "skip-backup": {"dump": "Don't create a backup of a previous dump archive before dumping the repository" ""}, "changes": {"log": "List differential changeset for each revision"}, "diff": {"log": "Display textual diff for each revision"}, "repo": {"status": """List branches and display repository status (regardless of "useChangesCommand" flag)"""}, "stay": {"branch": "Perform branch operation, but don't switch to newly created branch"}, "last": {"branch": "Use last revision instead of current file tree as basis for new branch. Doesn't affect current file tree"}, "fast": {"branch": "Use the experimental fast branch method. Always implies --last"}, "meta": {"switch": "Only switch the branch's file tracking patterns when switching the branch. Won't update any files"}, "progress": {None: """Display file names during file tree traversal, show processing speed, and show compression advantage, if the "compress" flag is enabled"""}, "log": {None: """Configures the Python logging module to include source details like log level, timestamp, module, and line number with the logged messages"""}, "verbose": {None: "Enable more verbose user output"}, "debug": {None: "Enable logging of internal details (intended for developers only)"}, "only ": {None: """Restrict operation to specified already tracked tracking pattern(s). 
Available for commands "changes", "commit", "diff", "switch", and "update" """}, "except ": {None: """Avoid operation for specified already tracked tracking pattern(s). Available for commands "changes", "commit", "diff", "switch", and "update" """}, "patterns": {"ls": "Only show tracking patterns"}, "tags": {"ls": "List all repository tags (has nothing to do with file or filepattern listing)"}, "recursive": {"ls": "Recursively list also files in sub-folders"}, "r": {"ls": "Recursively list also files in sub-folders"}, "all": {"ls": "Recursively list all files, starting from repository root", "log": """Show all commits since creation of the branch. - Default is only showing the last "logLines" entries"""}, "a": {"ls": "Recursively list all files, starting from repository root"}, "tag": {"commit": "Store the commit message as a tag that can be used instead of numeric revisions"}, "add": {"switch": "Only add new files"}, "add-lines": {"switch": "Only add inserted lines"}, "add-chars": {"switch": "Only add new characters"}, "rm": {"switch": "Only remove vanished files"}, "rm-lines": {"switch": "Only remove deleted lines"}, "rm-chars": {"switch": "Only remove vanished characters"}, "ask": {"switch": "Ask how to proceed with modified files"}, "ask-lines": {"switch": "Ask how to proceed with modified lines"}, "ask-chars": {"switch": "Ask how to proceed with modified characters"}, "eol": {"switch": "Use EOL style from the integrated file instead. 
Default: EOL style of current file"}, "ignore-whitespace": {"diff": "Ignore white spaces during comparison"}, "wrap": {"diff": "Wrap text around terminal instead of cropping into terminal width"}, "soft": {"mv": "Don't move or rename files, only affect the tracking pattern"}, "local": {"config set": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config unset": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config add": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config rm": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config show": "Only show configuration settings persisted in local repository, not from user-global settings store"}, "prune": {"config rm": "Remove a list-type parameter together with the last entry"}, "sos": {None: """Pass command and arguments to SOS, even when not in offline mode, e.g. "sos --sos config set key value" to avoid passing the command to Git or SVN"""}, "n": {"log": """Maximum number of entries to show"""}} # type: Dict[str, Dict[_coconut.typing.Optional[str], str]] # line 262 - - -def getTitleFont(text: 'str', width: 'int') -> 'Tuple[str, str]': # line 432 - ''' Finds best fitting font for termimal window width, falling back to SOS marker if nothing fits current terminal width. Returns (actual text, selected Figlet font). 
''' # line 433 - x = sorted((t for t in [(max((len(_) for _ in Figlet(font=f, width=999).renderText(text).split("\n"))), f) for f in ["big", "modular", "bell", "nscript", "pebbles", "puffy", "roman", "rounded", "santaclara", "script", "small", "soft", "standard", "univers", "thin"]] if t[0] <= width)) # type: List[Tuple[int, str]] # line 434 - if len(x) == 0: # replace by shortest text # line 435 - text = MARKER # replace by shortest text # line 435 - return (text, sorted((t for t in [(max((len(_) for _ in Figlet(font=f, width=999).renderText(text).split("\n"))), f) for f in ["big", "modular", "bell", "nscript", "pebbles", "puffy", "roman", "rounded", "santaclara", "script", "small", "soft", "standard", "univers", "thin"]] if t[0] <= width))[-1][1]) # line 436 - -@_coconut_tco # https://github.com/pwaller/pyfiglet/blob/master/doc/figfont.txt # line 438 -def getTitle(large: 'bool'=True) -> '_coconut.typing.Optional[str]': # https://github.com/pwaller/pyfiglet/blob/master/doc/figfont.txt # line 438 - ''' Large: use ascii-art. 
''' # line 439 - if not large: # line 440 - return APP # line 440 - if not Figlet: # line 441 - return None # line 441 - text, font = getTitleFont(APP, width=pure.termWidth) # line 442 - return _coconut_tail_call("\n".join, (_ for _ in Figlet(font=font, width=pure.termWidth).renderText(text).split("\n") if _.replace(" ", "") != "")) # line 443 - -def usage(argument: 'str', version: 'bool'=False, verbose: 'bool'=False): # line 445 - if version: # line 446 - title = getTitle() # type: _coconut.typing.Optional[str] # line 447 - if title: # line 448 - print(title + "\n") # line 448 - print("%s%s%s" % (MARKER, APPNAME if version else APP, "" if not version else " (PyPI: %s)" % VERSION)) # line 449 - if version: # line 450 - sys.exit(0) # line 450 - category = CategoryAbbrev.get(argument, None) # type: _coconut.typing.Optional[Category] # convert shorthand for category # line 451 - command = argument if category is None else None # type: _coconut.typing.Optional[str] # line 452 - if command is None: # line 453 - print("\nUsage:\n sos [, []] [, [ 0 else 0 # type: int # argument name length max plus indentation # line 463 - for c in cmd.arguments: # line 464 - print(pure.ljust(" %s " % c.name, maxlen) + ("\n" + pure.ljust(width=maxlen)).join(pure.splitStrip(c.long))) # line 464 - matchingoptions = [(optname, pure.splitStrip(description)) for optname, description in [(optname, dikt[name]) for optname, dikt in OPTIONS.items() if name in dikt]] # type: List[Tuple[str, _coconut.typing.Sequence[str]]] # line 465 - if matchingoptions: # line 466 - print("\n Options:") # line 467 - maxoptlen = max([len(optname) for optname, __ in matchingoptions]) # type: int # line 468 - for optname, descriptions in sorted(matchingoptions): # line 469 - if len(descriptions) == 0: # line 470 - continue # line 470 - print(" %s%s %s%s" % ("--" if len(optname) > 1 else "-", pure.ljust(optname, maxoptlen + (0 if len(optname) > 1 else 1)), descriptions[0], "\n" + pure.ajoin(" " * (6 + maxoptlen + (2 
if len(optname) > 1 else 1)), descriptions[1:], nl="\n") if len(descriptions) > 1 else "")) # line 471 - matchingoptions = [] if cmd is None else [(optname, pure.splitStrip(dikt[None]) if None in dikt else []) for optname, dikt in OPTIONS.items()] # add all text for the generic description # line 472 - if matchingoptions: # line 473 - print("\n Common options:") # line 474 - maxoptlen = max([len(optname) for optname, __ in matchingoptions]) # line 475 - for optname, descriptions in sorted(matchingoptions): # line 476 - if len(descriptions) == 0: # line 477 - continue # line 477 - print(" %s%s %s%s" % ("--" if len(optname) > 1 else "-", pure.ljust(optname, maxoptlen + (0 if len(optname) > 1 else 1)), descriptions[0], "\n" + pure.ajoin(" " * (6 + maxoptlen + (2 if len(optname) > 1 else 1)), descriptions[1:], nl="\n") if len(descriptions) > 1 else "")) # line 478 - if command is None: # line 479 - print("\nCommon options:") # line 480 - genericOptions = {k: v[None] for k, v in OPTIONS.items() if None in v} # type: Dict[str, str] # line 481 - maxlen = max((len(_) for _ in genericOptions)) # line 482 - for optname, description in sorted(genericOptions.items()): # line 483 - print(" %s%s %s" % ("--" if len(optname) > 1 else "-", pure.ljust(optname, maxlen), pure.ajoin(" " * (2 + 2 + maxlen + 2), pure.splitStrip(description), nl="\n", first=False))) # line 484 + Default is only showing the last "logLines" entries""", "publish": """Commit all files present at offline time, instead of only modifications thereafter. + When going offline with SOS on an underlying VCS checkout with modifications, use this option. 
+ Otherwise - underlying VCS checkout was clean when going offline with SOS - avoid this option."""}, "a": {"ls": "Recursively list all files, starting from repository root"}, "tag": {"commit": "Store the commit message as a tag that can be used instead of numeric revisions"}, "add": {"switch": "Only add new files"}, "add-lines": {"switch": "Only add inserted lines"}, "add-chars": {"switch": "Only add new characters"}, "rm": {"switch": "Only remove vanished files"}, "rm-lines": {"switch": "Only remove deleted lines"}, "rm-chars": {"switch": "Only remove vanished characters"}, "ask": {"switch": "Ask how to proceed with modified files"}, "ask-lines": {"switch": "Ask how to proceed with modified lines"}, "ask-chars": {"switch": "Ask how to proceed with modified characters"}, "eol": {"switch": "Use EOL style from the integrated file instead. Default: EOL style of current file"}, "ignore-whitespace": {"diff": "Ignore white spaces during comparison"}, "wrap": {"diff": "Wrap text around terminal instead of cropping into terminal width"}, "soft": {"mv": "Don't move or rename files, only affect the tracking pattern"}, "local": {"config set": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config unset": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config add": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config rm": "Persist configuration setting in local repository, not in user-global settings store"}, "local": {"config show": "Only show configuration settings persisted in local repository, not from user-global settings store"}, "prune": {"config rm": "Remove a list-type parameter together with the last entry"}, "sos": {None: """Pass command and arguments to SOS, even when not in offline mode, e.g. 
"sos --sos config set key value" to avoid passing the command to Git or SVN"""}, "n": {"log": """Maximum number of entries to show"""}} # type: Dict[str, Dict[_coconut.typing.Optional[str], str]] # line 262 + + +def getTitleFont(text: 'str', width: 'int') -> 'Tuple[str, str]': # line 435 + ''' Finds best fitting font for termimal window width, falling back to SOS marker if nothing fits current terminal width. Returns (actual text, selected Figlet font). ''' # line 436 + x = sorted((t for t in [(max((len(_) for _ in Figlet(font=f, width=999).renderText(text).split("\n"))), f) for f in ["big", "modular", "bell", "nscript", "pebbles", "puffy", "roman", "rounded", "santaclara", "script", "small", "soft", "standard", "univers", "thin"]] if t[0] <= width)) # type: List[Tuple[int, str]] # line 437 + if len(x) == 0: # replace by shortest text # line 438 + text = MARKER # replace by shortest text # line 438 + return (text, sorted((t for t in [(max((len(_) for _ in Figlet(font=f, width=999).renderText(text).split("\n"))), f) for f in ["big", "modular", "bell", "nscript", "pebbles", "puffy", "roman", "rounded", "santaclara", "script", "small", "soft", "standard", "univers", "thin"]] if t[0] <= width))[-1][1]) # line 439 + +@_coconut_tco # https://github.com/pwaller/pyfiglet/blob/master/doc/figfont.txt # line 441 +def getTitle(large: 'bool'=True) -> '_coconut.typing.Optional[str]': # https://github.com/pwaller/pyfiglet/blob/master/doc/figfont.txt # line 441 + ''' Large: use ascii-art. 
''' # line 442 + if not large: # line 443 + return APP # line 443 + if not Figlet: # line 444 + return None # line 444 + text, font = getTitleFont(APP, width=pure.termWidth) # line 445 + return _coconut_tail_call("\n".join, (_ for _ in Figlet(font=font, width=pure.termWidth).renderText(text).split("\n") if _.replace(" ", "") != "")) # line 446 + +def usage(argument: 'str', version: 'bool'=False, verbose: 'bool'=False): # line 448 + if version: # line 449 + title = getTitle() # type: _coconut.typing.Optional[str] # line 450 + if title: # line 451 + print(title + "\n") # line 451 + print("%s%s%s" % (MARKER, APPNAME if version else APP, "" if not version else " (PyPI: %s)" % VERSION)) # line 452 + if version: # line 453 + sys.exit(0) # line 453 + category = CategoryAbbrev.get(argument, None) # type: _coconut.typing.Optional[Category] # convert shorthand for category # line 454 + command = argument if category is None else None # type: _coconut.typing.Optional[str] # line 455 + if command is None: # line 456 + print("\nUsage:\n sos [, []] [, [ 0 else 0 # type: int # argument name length max plus indentation # line 466 + for c in cmd.arguments: # line 467 + print(pure.ljust(" %s " % c.name, maxlen) + ("\n" + pure.ljust(width=maxlen)).join(pure.splitStrip(c.long))) # line 467 + matchingoptions = [(optname, pure.splitStrip(description)) for optname, description in [(optname, dikt[name]) for optname, dikt in OPTIONS.items() if name in dikt]] # type: List[Tuple[str, _coconut.typing.Sequence[str]]] # line 468 + if matchingoptions: # line 469 + print("\n Options:") # line 470 + maxoptlen = max([len(optname) for optname, __ in matchingoptions]) # type: int # line 471 + for optname, descriptions in sorted(matchingoptions): # line 472 + if len(descriptions) == 0: # line 473 + continue # line 473 + print(" %s%s %s%s" % ("--" if len(optname) > 1 else "-", pure.ljust(optname, maxoptlen + (0 if len(optname) > 1 else 1)), descriptions[0], "\n" + pure.ajoin(" " * (6 + maxoptlen + (2 
if len(optname) > 1 else 1)), descriptions[1:], nl="\n") if len(descriptions) > 1 else "")) # line 474 + matchingoptions = [] if cmd is None else [(optname, pure.splitStrip(dikt[None]) if None in dikt else []) for optname, dikt in OPTIONS.items()] # add all text for the generic description # line 475 + if matchingoptions: # line 476 + print("\n Common options:") # line 477 + maxoptlen = max([len(optname) for optname, __ in matchingoptions]) # line 478 + for optname, descriptions in sorted(matchingoptions): # line 479 + if len(descriptions) == 0: # line 480 + continue # line 480 + print(" %s%s %s%s" % ("--" if len(optname) > 1 else "-", pure.ljust(optname, maxoptlen + (0 if len(optname) > 1 else 1)), descriptions[0], "\n" + pure.ajoin(" " * (6 + maxoptlen + (2 if len(optname) > 1 else 1)), descriptions[1:], nl="\n") if len(descriptions) > 1 else "")) # line 481 + if command is None: # line 482 + print("\nCommon options:") # line 483 + genericOptions = {k: v[None] for k, v in OPTIONS.items() if None in v} # type: Dict[str, str] # line 484 + maxlen = max((len(_) for _ in genericOptions)) # line 485 + for optname, description in sorted(genericOptions.items()): # line 486 + print(" %s%s %s" % ("--" if len(optname) > 1 else "-", pure.ljust(optname, maxlen), pure.ajoin(" " * (2 + 2 + maxlen + 2), pure.splitStrip(description), nl="\n", first=False))) # line 487 # TODO wrap text at terminal boundaries automatically, if space suffices # [][/] Revision string. 
Branch is optional (defaulting to current branch) and may be a label or number >= 0 # Revision is an optional integer and may be negative to reference from the latest commits (-1 is most recent revision), or a tag name""" - sys.exit(0) # line 489 + sys.exit(0) # line 492 diff --git a/sos/utility.coco b/sos/utility.coco index 35e71b6..b6f7d52 100644 --- a/sos/utility.coco +++ b/sos/utility.coco @@ -100,6 +100,7 @@ MULT_SYMBOL:str = "\u00d7" CROSS_SYMBOL:str = "\u2716" CHECKMARK_SYMBOL:str = "\u2714" PLUSMINUS_SYMBOL:str = "\u00b1" # alternative for "~" +ARROW_SYMBOL:str = "\u2799" # alternative for "*" in "this revision" MOVE_SYMBOL:str = "\u21cc" # alternative for "#". or use \U0001F5C0", which is very unlikely to be in any console font METADATA_FORMAT:int = 1 # counter for incompatible consecutive formats (was undefined, "1" is the first versioned version after that) vcsFolders:Dict[str,str] = {".svn": SVN, ".git": "git", ".bzr": "bzr", ".hg": "hg", ".fslckout": "fossil", "_FOSSIL_": "fossil", ".CVS": "cvs", "_darcs": "darcs", "_MTN": "monotone", ".git/GL_COMMIT_EDIT_MSG": "gl"} diff --git a/sos/utility.py b/sos/utility.py index 0d1f516..9dd5657 100644 --- a/sos/utility.py +++ b/sos/utility.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# __coconut_hash__ = 0x1834478 +# __coconut_hash__ = 0x397bd66b # Compiled with Coconut version 1.3.1-post_dev28 [Dead Parrot] @@ -178,594 +178,595 @@ def error(_, *s): # line 76 CROSS_SYMBOL = "\u2716" # type: str # line 100 CHECKMARK_SYMBOL = "\u2714" # type: str # line 101 PLUSMINUS_SYMBOL = "\u00b1" # type: str # alternative for "~" # line 102 -MOVE_SYMBOL = "\u21cc" # type: str # alternative for "#". 
or use \U0001F5C0", which is very unlikely to be in any console font # line 103 -METADATA_FORMAT = 1 # type: int # counter for incompatible consecutive formats (was undefined, "1" is the first versioned version after that) # line 104 -vcsFolders = {".svn": SVN, ".git": "git", ".bzr": "bzr", ".hg": "hg", ".fslckout": "fossil", "_FOSSIL_": "fossil", ".CVS": "cvs", "_darcs": "darcs", "_MTN": "monotone", ".git/GL_COMMIT_EDIT_MSG": "gl"} # type: Dict[str, str] # line 105 -vcsBranches = {SVN: "trunk", "git": "master", "bzr": "trunk", "hg": "default", "fossil": None, "cvs": None, "darcs": None, "monotone": None} # type: Dict[str, _coconut.typing.Optional[str]] # line 106 -vcsCommits = {SVN: (True, None), "git": (False, None), "bzr": (True, None), "hg": (True, None), "fossil": (True, "--no-warnings"), "cvs": (True, None), "darcs": (False, "--all"), "monotone": (False, None)} # type: Dict[str, Tuple[bool, _coconut.typing.Optional[str]]] # bool: tracked? (otherwise picky), str:arguments to "commit" TODO CVS, RCS have probably different per-file operation # line 107 -vcsNames = {SVN: "Subversion", "git": "Git", "bzr": "Bazaar", "hg": "Mercurial", "fossil": "Fossil", "cvs": "CVS", "darcs": "darcs", "monotone": "monotone"} # type: Dict[str, str] # from cmd to long name # line 108 -NL_NAMES = {None: "", b"\r\n": "", b"\n\r": "", b"\n": "", b"\r": ""} # type: Dict[bytes, str] # line 109 -MAX_COMMAND_LINE = {"win32": 8191, "linux2": 4096, None: 1023} # type: Dict[str, int] # may be much longer on posix. 
https://stackoverflow.com/questions/3205027/maximum-length-of-command-line-string # line 110 -defaults = Accessor({"strict": False, "track": False, "picky": False, "compress": False, "useChangesCommand": False, "useUnicodeFont": sys.platform != "win32", "logLines": 20, "texttype": ["*.md", "*.coco", "*.py", "*.pyi", "*.pth"], "bintype": [], "ignoreDirs": [".*", "__pycache__", ".mypy_cache"], "ignoreDirsWhitelist": [], "ignores": ["__coconut__.py", "*.bak", "*.py[cdo]", "*.class", ".fslckout", "_FOSSIL_", "*%s" % DUMP_FILE], "ignoresWhitelist": []}) # type: Accessor # line 111 +ARROW_SYMBOL = "\u2799" # type: str # alternative for "*" in "this revision" # line 103 +MOVE_SYMBOL = "\u21cc" # type: str # alternative for "#". or use \U0001F5C0", which is very unlikely to be in any console font # line 104 +METADATA_FORMAT = 1 # type: int # counter for incompatible consecutive formats (was undefined, "1" is the first versioned version after that) # line 105 +vcsFolders = {".svn": SVN, ".git": "git", ".bzr": "bzr", ".hg": "hg", ".fslckout": "fossil", "_FOSSIL_": "fossil", ".CVS": "cvs", "_darcs": "darcs", "_MTN": "monotone", ".git/GL_COMMIT_EDIT_MSG": "gl"} # type: Dict[str, str] # line 106 +vcsBranches = {SVN: "trunk", "git": "master", "bzr": "trunk", "hg": "default", "fossil": None, "cvs": None, "darcs": None, "monotone": None} # type: Dict[str, _coconut.typing.Optional[str]] # line 107 +vcsCommits = {SVN: (True, None), "git": (False, None), "bzr": (True, None), "hg": (True, None), "fossil": (True, "--no-warnings"), "cvs": (True, None), "darcs": (False, "--all"), "monotone": (False, None)} # type: Dict[str, Tuple[bool, _coconut.typing.Optional[str]]] # bool: tracked? 
(otherwise picky), str:arguments to "commit" TODO CVS, RCS have probably different per-file operation # line 108 +vcsNames = {SVN: "Subversion", "git": "Git", "bzr": "Bazaar", "hg": "Mercurial", "fossil": "Fossil", "cvs": "CVS", "darcs": "darcs", "monotone": "monotone"} # type: Dict[str, str] # from cmd to long name # line 109 +NL_NAMES = {None: "", b"\r\n": "", b"\n\r": "", b"\n": "", b"\r": ""} # type: Dict[bytes, str] # line 110 +MAX_COMMAND_LINE = {"win32": 8191, "linux2": 4096, None: 1023} # type: Dict[str, int] # may be much longer on posix. https://stackoverflow.com/questions/3205027/maximum-length-of-command-line-string # line 111 +defaults = Accessor({"strict": False, "track": False, "picky": False, "compress": False, "useChangesCommand": False, "useUnicodeFont": sys.platform != "win32", "logLines": 20, "texttype": ["*.md", "*.coco", "*.py", "*.pyi", "*.pth"], "bintype": [], "ignoreDirs": [".*", "__pycache__", ".mypy_cache"], "ignoreDirsWhitelist": [], "ignores": ["__coconut__.py", "*.bak", "*.py[cdo]", "*.class", ".fslckout", "_FOSSIL_", "*%s" % DUMP_FILE], "ignoresWhitelist": []}) # type: Accessor # line 112 # Functions -def printo(s: 'str'="", nl: 'str'="\n"): # PEP528 compatibility # line 124 - tryOrDefault(lambda _=None: (lambda _coconut_none_coalesce_item: sys.stdout if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(sys.stdout.buffer), sys.stdout).write((s + nl).encode(sys.stdout.encoding, 'backslashreplace')) # PEP528 compatibility # line 124 - sys.stdout.flush() # PEP528 compatibility # line 124 -def printe(s: 'str'="", nl: 'str'="\n"): # line 125 - tryOrDefault(lambda _=None: (lambda _coconut_none_coalesce_item: sys.stderr if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(sys.stderr.buffer), sys.stderr).write((s + nl).encode(sys.stderr.encoding, 'backslashreplace')) # line 125 - sys.stderr.flush() # line 125 -@_coconut_tco # for py->os access of writing filenames # PEP 529 compatibility # line 126 -def 
encode(s: 'str') -> 'bytes': # for py->os access of writing filenames # PEP 529 compatibility # line 126 - return _coconut_tail_call(os.fsencode, s) # for py->os access of writing filenames # PEP 529 compatibility # line 126 -@_coconut_tco # for os->py access of reading filenames # line 127 -def decode(b: 'bytes') -> 'str': # for os->py access of reading filenames # line 127 - return _coconut_tail_call(os.fsdecode, b) # for os->py access of reading filenames # line 127 -try: # line 128 - import chardet # https://github.com/chardet/chardet # line 129 - def detectEncoding(binary: 'bytes') -> 'str': # line 130 - return chardet.detect(binary)["encoding"] # line 130 -except: # Guess the encoding # line 131 - def detectEncoding(binary: 'bytes') -> 'str': # Guess the encoding # line 131 - ''' Fallback if chardet library missing. ''' # line 132 - try: # line 133 - binary.decode(UTF8) # line 133 - return UTF8 # line 133 - except UnicodeError: # line 134 - pass # line 134 - try: # line 135 - binary.decode("utf_16") # line 135 - return "utf_16" # line 135 - except UnicodeError: # line 136 - pass # line 136 - try: # line 137 - binary.decode("cp1252") # line 137 - return "cp1252" # line 137 - except UnicodeError: # line 138 - pass # line 138 - return "ascii" # this code will never be reached, as above is an 8-bit charset that always matches # line 139 - -def tryOrDefault(func: '_coconut.typing.Callable[..., Any]', default: 'Any') -> 'Any': # line 141 - try: # line 142 - return func() # line 142 - except: # line 143 - return default # line 143 - -def tryOrIgnore(func: '_coconut.typing.Callable[..., Any]', onError: '_coconut.typing.Callable[[Exception], None]'=lambda _=None: None) -> 'Any': # line 145 - try: # line 146 - return func() # line 146 - except Exception as E: # line 147 - onError(E) # line 147 - -def removePath(key: 'str', value: 'str') -> 'str': # line 149 - ''' Cleanup of user-specified global file patterns. 
''' # TODO improve # line 150 - return value if value in GLOBAL_LISTS or SLASH not in value else value[value.rindex(SLASH) + 1:] # line 151 - -def dictUpdate(dikt: 'Dict[Any, Any]', by: 'Dict[Any, Any]') -> 'Dict[Any, Any]': # line 153 - d = {} # type: Dict[Any, Any] # line 153 - d.update(dikt) # line 153 - d.update(by) # line 153 - return d # line 153 - -def openIt(file: 'str', mode: 'str', compress: 'bool'=False) -> 'IO[bytes]': # Abstraction for opening both compressed and plain files # line 155 - return bz2.BZ2File(encode(file), mode) if compress else open(encode(file), mode + "b") # Abstraction for opening both compressed and plain files # line 155 - -def eoldet(file: 'bytes') -> '_coconut.typing.Optional[bytes]': # line 157 - ''' Determine EOL style from a binary string. ''' # line 158 - lf = file.count(b"\n") # type: int # line 159 - cr = file.count(b"\r") # type: int # line 160 - crlf = file.count(b"\r\n") # type: int # line 161 - if crlf > 0: # DOS/Windows/Symbian etc. # line 162 - if lf != crlf or cr != crlf: # line 163 - warn("Inconsistent CR/NL count with CR+NL. Mixed EOL style detected, may cause problems during merge") # line 163 - return b"\r\n" # line 164 - if lf != 0 and cr != 0: # line 165 - warn("Inconsistent CR/NL count without CR+NL. 
Mixed EOL style detected, may cause problems during merge") # line 165 - if lf > cr: # Linux/Unix # line 166 - return b"\n" # Linux/Unix # line 166 - if cr > lf: # older 8-bit machines # line 167 - return b"\r" # older 8-bit machines # line 167 - return None # no new line contained, cannot determine # line 168 - -if TYPE_CHECKING: # line 170 - Splittable = TypeVar("Splittable", AnyStr) # line 171 - def safeSplit(s: 'Splittable', d: '_coconut.typing.Optional[Splittable]'=None) -> 'List[Splittable]': # line 172 - return s.split((("\n" if isinstance(s, str) else b"\n") if d is None else d)) if len(s) > 0 else [] # line 172 -else: # line 173 - def safeSplit(s, d=None): # line 174 - return s.split((("\n" if isinstance(s, str) else b"\n") if d is None else d)) if len(s) > 0 else [] # line 174 - -@_coconut_tco # line 176 -def hashStr(datas: 'str') -> 'str': # line 176 - return _coconut_tail_call(hashlib.sha256(datas.encode(UTF8)).hexdigest) # line 176 - -def modified(changes: 'ChangeSet', onlyBinary: 'bool'=False) -> 'bool': # line 178 - return len(changes.additions) > 0 or len(changes.deletions) > 0 or len(changes.modifications) > 0 or len(changes.moves) > 0 # line 178 - -def listindex(lizt: 'Sequence[Any]', what: 'Any', index: 'int'=0) -> 'int': # line 180 - return lizt[index:].index(what) + index # line 180 - -def branchFolder(branch: 'int', base: '_coconut.typing.Optional[str]'=None, file: '_coconut.typing.Optional[str]'=None) -> 'str': # line 182 - return os.path.join((os.getcwd() if base is None else base), metaFolder, "b%d" % branch) + ((os.sep + file) if file else "") # line 182 - -def revisionFolder(branch: 'int', revision: 'int', base: '_coconut.typing.Optional[str]'=None, file: '_coconut.typing.Optional[str]'=None) -> 'str': # line 184 - return os.path.join(branchFolder(branch, base), "r%d" % revision) + ((os.sep + file) if file else "") # line 184 - -def Exit(message: 'str'="", code=1): # line 186 - printe("[EXIT%s]" % (" %.1fs" % (time.time() - START_TIME) if 
verbose else "") + (" " + message + "." if message != "" else "")) # line 186 - sys.exit(code) # line 186 - -def fitStrings(strings: '_coconut.typing.Sequence[str]', prefix: 'str', length: 'int'=MAX_COMMAND_LINE.get(sys.platform, MAX_COMMAND_LINE[None]), separator: 'str'=" ", process: '_coconut.typing.Callable[..., str]'=lambda _=None: '"%s"' % _) -> 'str': # line 188 - ''' Returns a packed string, destructively consuming entries from the provided list. Does similar to xargs. getconf ARG_MAX or xargs --show-limits. ''' # line 189 - if len(prefix + separator + ((process)(strings[0]))) > length: # line 190 - raise Exception("Cannot possibly strings pack into specified length") # line 190 - while len(strings) > 0 and len(prefix + separator + ((process)(strings[0]))) <= length: # line 191 - prefix += separator + ((process)(strings.pop(0))) # line 191 - return prefix # line 192 - -def exception(E): # line 194 - ''' Report an exception to the user to enable useful bug reporting. ''' # line 195 - printo(str(E)) # line 196 - import traceback # line 197 - traceback.print_exc() # line 198 - traceback.print_stack() # line 199 - -def hashFile(path: 'str', compress: 'bool', saveTo: '_coconut.typing.Optional[str]'=None, callback: 'Optional[_coconut.typing.Callable[[str], None]]'=None, symbols: 'str'=PROGRESS_MARKER[0]) -> 'Tuple[str, int]': # line 201 - ''' Calculate hash of file contents, and return compressed sized, if in write mode, or zero. ''' # line 202 - indicator = ProgressIndicator(symbols, callback) if callback else None # type: _coconut.typing.Optional[ProgressIndicator] # line 203 - _hash = hashlib.sha256() # line 204 - wsize = 0 # type: int # line 205 - if saveTo and os.path.exists(encode(saveTo)): # line 206 - Exit("Hash conflict. Leaving revision in inconsistent state. 
This should happen only once in a lifetime") # line 206 - to = openIt(saveTo, "w", compress) if saveTo else None # line 207 - with open(encode(path), "rb") as fd: # line 208 - while True: # line 209 - buffer = fd.read(bufSize) # type: bytes # line 210 - _hash.update(buffer) # line 211 - if to: # line 212 - to.write(buffer) # line 212 - if len(buffer) < bufSize: # line 213 - break # line 213 - if indicator: # line 214 - indicator.getIndicator() # line 214 - if to: # line 215 - to.close() # line 216 - wsize = os.stat(encode(saveTo)).st_size # line 217 - return (_hash.hexdigest(), wsize) # line 218 - -def getAnyOfMap(map: 'Dict[str, Any]', params: '_coconut.typing.Sequence[str]', default: 'Any'=None) -> 'Any': # line 220 - ''' Utility to find any entries of a dictionary in a list to return the dictionaries value. ''' # line 221 - for k, v in map.items(): # line 222 - if k in params: # line 222 - return v # line 222 - return default # line 223 - -@_coconut_tco # line 225 -def strftime(timestamp: '_coconut.typing.Optional[int]'=None) -> 'str': # line 225 - return _coconut_tail_call(time.strftime, "%Y-%m-%d %H:%M:%S", time.localtime(timestamp / 1000. 
if timestamp is not None else None)) # line 225 - -def detectAndLoad(filename: '_coconut.typing.Optional[str]'=None, content: '_coconut.typing.Optional[bytes]'=None, ignoreWhitespace: 'bool'=False) -> 'Tuple[str, bytes, _coconut.typing.Sequence[str]]': # line 227 - lines = [] # type: _coconut.typing.Sequence[str] # line 228 - if filename is not None: # line 229 - with open(encode(filename), "rb") as fd: # line 229 - content = fd.read() # line 229 - encoding = (lambda _coconut_none_coalesce_item: sys.getdefaultencoding() if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(detectEncoding(content)) # type: str # line 230 - eol = eoldet(content) # type: _coconut.typing.Optional[bytes] # line 231 - if filename is not None: # line 232 - with codecs.open(encode(filename), encoding=encoding) as fd2: # line 232 - lines = safeSplit(fd2.read(), ((b"\n" if eol is None else eol)).decode(encoding)) # line 232 - elif content is not None: # line 233 - lines = safeSplit(content.decode(encoding), ((b"\n" if eol is None else eol)).decode(encoding)) # line 233 - else: # line 234 - return (sys.getdefaultencoding(), b"\n", []) # line 234 - if ignoreWhitespace: # line 235 - lines[:] = [line.replace("\t", " ").strip() for line in lines] # line 235 - return (encoding, eol, lines) # line 236 - -if TYPE_CHECKING: # line 238 - DataType = TypeVar("DataType", BranchInfo, ChangeSet, MergeBlock, PathInfo) # line 239 - @_coconut_tco # line 240 - def dataCopy(_tipe: 'Type[DataType]', _old: 'DataType', *_args, byValue: 'bool'=False, **_kwargs) -> 'DataType': # line 240 - ''' A better makedata() version. 
''' # line 241 - r = _old._asdict() # type: Dict[str, Any] # line 242 - r.update({k: ([e for e in v] if byValue and isinstance(v, (list, tuple, set)) else v) for k, v in _kwargs.items()}) # copy by value if required # line 243 - return _coconut_tail_call(makedata, _tipe, *(list(_args) + [r[field] for field in _old._fields])) # TODO also offer copy-by-value here # line 244 -else: # line 245 - @_coconut_tco # line 246 - def dataCopy(_tipe, _old, *_args, byValue=False, **_kwargs) -> 'DataType': # line 246 - ''' A better makedata() version. ''' # line 247 - r = _old._asdict() # line 248 - r.update({k: ([e for e in v] if byValue and isinstance(v, (list, tuple, set)) else v) for k, v in _kwargs.items()}) # copy by value if required # line 249 - return _coconut_tail_call(makedata, _tipe, *(list(_args) + [r[field] for field in _old._fields])) # TODO also offer copy-by-value here # line 250 - -def detectMoves(changes: 'ChangeSet') -> 'Dict[str, Tuple[str, PathInfo]]': # line 252 - ''' Compute renames/removes for a changeset. ''' # line 253 - moves = {} # type: Dict[str, Tuple[str, PathInfo]] # line 254 - for path, info in changes.additions.items(): # line 255 - for dpath, dinfo in changes.deletions.items(): # line 255 - if info.size == dinfo.size and info.mtime == dinfo.mtime and info.hash == dinfo.hash: # was moved TODO check either mtime or hash? # line 256 - moves[path] = (dpath, info) # store new data and original name, but don't remove add/del # line 257 - break # deletions loop, continue with next addition # line 258 - return moves # line 259 - -def user_input(text: 'str', choices: 'Iterable[str]', default: 'str'=None, selection: 'str'="") -> 'str': # line 261 - ''' Default can be a selection from choice and allows empty input. 
''' # line 262 - while True: # line 263 - selection = input(text).strip().lower() # line 264 - if selection != "" and selection in choices: # line 265 - break # line 265 - if selection == "" and default is not None: # line 266 - selection = default # line 266 +def printo(s: 'str'="", nl: 'str'="\n"): # PEP528 compatibility # line 125 + tryOrDefault(lambda _=None: (lambda _coconut_none_coalesce_item: sys.stdout if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(sys.stdout.buffer), sys.stdout).write((s + nl).encode(sys.stdout.encoding, 'backslashreplace')) # PEP528 compatibility # line 125 + sys.stdout.flush() # PEP528 compatibility # line 125 +def printe(s: 'str'="", nl: 'str'="\n"): # line 126 + tryOrDefault(lambda _=None: (lambda _coconut_none_coalesce_item: sys.stderr if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(sys.stderr.buffer), sys.stderr).write((s + nl).encode(sys.stderr.encoding, 'backslashreplace')) # line 126 + sys.stderr.flush() # line 126 +@_coconut_tco # for py->os access of writing filenames # PEP 529 compatibility # line 127 +def encode(s: 'str') -> 'bytes': # for py->os access of writing filenames # PEP 529 compatibility # line 127 + return _coconut_tail_call(os.fsencode, s) # for py->os access of writing filenames # PEP 529 compatibility # line 127 +@_coconut_tco # for os->py access of reading filenames # line 128 +def decode(b: 'bytes') -> 'str': # for os->py access of reading filenames # line 128 + return _coconut_tail_call(os.fsdecode, b) # for os->py access of reading filenames # line 128 +try: # line 129 + import chardet # https://github.com/chardet/chardet # line 130 + def detectEncoding(binary: 'bytes') -> 'str': # line 131 + return chardet.detect(binary)["encoding"] # line 131 +except: # Guess the encoding # line 132 + def detectEncoding(binary: 'bytes') -> 'str': # Guess the encoding # line 132 + ''' Fallback if chardet library missing. 
''' # line 133 + try: # line 134 + binary.decode(UTF8) # line 134 + return UTF8 # line 134 + except UnicodeError: # line 135 + pass # line 135 + try: # line 136 + binary.decode("utf_16") # line 136 + return "utf_16" # line 136 + except UnicodeError: # line 137 + pass # line 137 + try: # line 138 + binary.decode("cp1252") # line 138 + return "cp1252" # line 138 + except UnicodeError: # line 139 + pass # line 139 + return "ascii" # this code will never be reached, as above is an 8-bit charset that always matches # line 140 + +def tryOrDefault(func: '_coconut.typing.Callable[..., Any]', default: 'Any') -> 'Any': # line 142 + try: # line 143 + return func() # line 143 + except: # line 144 + return default # line 144 + +def tryOrIgnore(func: '_coconut.typing.Callable[..., Any]', onError: '_coconut.typing.Callable[[Exception], None]'=lambda _=None: None) -> 'Any': # line 146 + try: # line 147 + return func() # line 147 + except Exception as E: # line 148 + onError(E) # line 148 + +def removePath(key: 'str', value: 'str') -> 'str': # line 150 + ''' Cleanup of user-specified global file patterns. ''' # TODO improve # line 151 + return value if value in GLOBAL_LISTS or SLASH not in value else value[value.rindex(SLASH) + 1:] # line 152 + +def dictUpdate(dikt: 'Dict[Any, Any]', by: 'Dict[Any, Any]') -> 'Dict[Any, Any]': # line 154 + d = {} # type: Dict[Any, Any] # line 154 + d.update(dikt) # line 154 + d.update(by) # line 154 + return d # line 154 + +def openIt(file: 'str', mode: 'str', compress: 'bool'=False) -> 'IO[bytes]': # Abstraction for opening both compressed and plain files # line 156 + return bz2.BZ2File(encode(file), mode) if compress else open(encode(file), mode + "b") # Abstraction for opening both compressed and plain files # line 156 + +def eoldet(file: 'bytes') -> '_coconut.typing.Optional[bytes]': # line 158 + ''' Determine EOL style from a binary string. 
''' # line 159 + lf = file.count(b"\n") # type: int # line 160 + cr = file.count(b"\r") # type: int # line 161 + crlf = file.count(b"\r\n") # type: int # line 162 + if crlf > 0: # DOS/Windows/Symbian etc. # line 163 + if lf != crlf or cr != crlf: # line 164 + warn("Inconsistent CR/NL count with CR+NL. Mixed EOL style detected, may cause problems during merge") # line 164 + return b"\r\n" # line 165 + if lf != 0 and cr != 0: # line 166 + warn("Inconsistent CR/NL count without CR+NL. Mixed EOL style detected, may cause problems during merge") # line 166 + if lf > cr: # Linux/Unix # line 167 + return b"\n" # Linux/Unix # line 167 + if cr > lf: # older 8-bit machines # line 168 + return b"\r" # older 8-bit machines # line 168 + return None # no new line contained, cannot determine # line 169 + +if TYPE_CHECKING: # line 171 + Splittable = TypeVar("Splittable", AnyStr) # line 172 + def safeSplit(s: 'Splittable', d: '_coconut.typing.Optional[Splittable]'=None) -> 'List[Splittable]': # line 173 + return s.split((("\n" if isinstance(s, str) else b"\n") if d is None else d)) if len(s) > 0 else [] # line 173 +else: # line 174 + def safeSplit(s, d=None): # line 175 + return s.split((("\n" if isinstance(s, str) else b"\n") if d is None else d)) if len(s) > 0 else [] # line 175 + +@_coconut_tco # line 177 +def hashStr(datas: 'str') -> 'str': # line 177 + return _coconut_tail_call(hashlib.sha256(datas.encode(UTF8)).hexdigest) # line 177 + +def modified(changes: 'ChangeSet', onlyBinary: 'bool'=False) -> 'bool': # line 179 + return len(changes.additions) > 0 or len(changes.deletions) > 0 or len(changes.modifications) > 0 or len(changes.moves) > 0 # line 179 + +def listindex(lizt: 'Sequence[Any]', what: 'Any', index: 'int'=0) -> 'int': # line 181 + return lizt[index:].index(what) + index # line 181 + +def branchFolder(branch: 'int', base: '_coconut.typing.Optional[str]'=None, file: '_coconut.typing.Optional[str]'=None) -> 'str': # line 183 + return os.path.join((os.getcwd() if base 
is None else base), metaFolder, "b%d" % branch) + ((os.sep + file) if file else "") # line 183 + +def revisionFolder(branch: 'int', revision: 'int', base: '_coconut.typing.Optional[str]'=None, file: '_coconut.typing.Optional[str]'=None) -> 'str': # line 185 + return os.path.join(branchFolder(branch, base), "r%d" % revision) + ((os.sep + file) if file else "") # line 185 + +def Exit(message: 'str'="", code=1): # line 187 + printe("[EXIT%s]" % (" %.1fs" % (time.time() - START_TIME) if verbose else "") + (" " + message + "." if message != "" else "")) # line 187 + sys.exit(code) # line 187 + +def fitStrings(strings: '_coconut.typing.Sequence[str]', prefix: 'str', length: 'int'=MAX_COMMAND_LINE.get(sys.platform, MAX_COMMAND_LINE[None]), separator: 'str'=" ", process: '_coconut.typing.Callable[..., str]'=lambda _=None: '"%s"' % _) -> 'str': # line 189 + ''' Returns a packed string, destructively consuming entries from the provided list. Does similar as xargs. getconf ARG_MAX or xargs --show-limits. ''' # line 190 + if len(prefix + separator + ((process)(strings[0]))) > length: # line 191 + raise Exception("Cannot possibly strings pack into specified length") # line 191 + while len(strings) > 0 and len(prefix + separator + ((process)(strings[0]))) <= length: # line 192 + prefix += separator + ((process)(strings.pop(0))) # line 192 + return prefix # line 193 + +def exception(E): # line 195 + ''' Report an exception to the user to enable useful bug reporting. ''' # line 196 + printo(str(E)) # line 197 + import traceback # line 198 + traceback.print_exc() # line 199 + traceback.print_stack() # line 200 + +def hashFile(path: 'str', compress: 'bool', saveTo: '_coconut.typing.Optional[str]'=None, callback: 'Optional[_coconut.typing.Callable[[str], None]]'=None, symbols: 'str'=PROGRESS_MARKER[0]) -> 'Tuple[str, int]': # line 202 + ''' Calculate hash of file contents, and return compressed sized, if in write mode, or zero. 
''' # line 203 + indicator = ProgressIndicator(symbols, callback) if callback else None # type: _coconut.typing.Optional[ProgressIndicator] # line 204 + _hash = hashlib.sha256() # line 205 + wsize = 0 # type: int # line 206 + if saveTo and os.path.exists(encode(saveTo)): # line 207 + Exit("Hash conflict. Leaving revision in inconsistent state. This should happen only once in a lifetime") # line 207 + to = openIt(saveTo, "w", compress) if saveTo else None # line 208 + with open(encode(path), "rb") as fd: # line 209 + while True: # line 210 + buffer = fd.read(bufSize) # type: bytes # line 211 + _hash.update(buffer) # line 212 + if to: # line 213 + to.write(buffer) # line 213 + if len(buffer) < bufSize: # line 214 + break # line 214 + if indicator: # line 215 + indicator.getIndicator() # line 215 + if to: # line 216 + to.close() # line 217 + wsize = os.stat(encode(saveTo)).st_size # line 218 + return (_hash.hexdigest(), wsize) # line 219 + +def getAnyOfMap(map: 'Dict[str, Any]', params: '_coconut.typing.Sequence[str]', default: 'Any'=None) -> 'Any': # line 221 + ''' Utility to find any entries of a dictionary in a list to return the dictionaries value. ''' # line 222 + for k, v in map.items(): # line 223 + if k in params: # line 223 + return v # line 223 + return default # line 224 + +@_coconut_tco # line 226 +def strftime(timestamp: '_coconut.typing.Optional[int]'=None) -> 'str': # line 226 + return _coconut_tail_call(time.strftime, "%Y-%m-%d %H:%M:%S", time.localtime(timestamp / 1000. 
if timestamp is not None else None)) # line 226 + +def detectAndLoad(filename: '_coconut.typing.Optional[str]'=None, content: '_coconut.typing.Optional[bytes]'=None, ignoreWhitespace: 'bool'=False) -> 'Tuple[str, bytes, _coconut.typing.Sequence[str]]': # line 228 + lines = [] # type: _coconut.typing.Sequence[str] # line 229 + if filename is not None: # line 230 + with open(encode(filename), "rb") as fd: # line 230 + content = fd.read() # line 230 + encoding = (lambda _coconut_none_coalesce_item: sys.getdefaultencoding() if _coconut_none_coalesce_item is None else _coconut_none_coalesce_item)(detectEncoding(content)) # type: str # line 231 + eol = eoldet(content) # type: _coconut.typing.Optional[bytes] # line 232 + if filename is not None: # line 233 + with codecs.open(encode(filename), encoding=encoding) as fd2: # line 233 + lines = safeSplit(fd2.read(), ((b"\n" if eol is None else eol)).decode(encoding)) # line 233 + elif content is not None: # line 234 + lines = safeSplit(content.decode(encoding), ((b"\n" if eol is None else eol)).decode(encoding)) # line 234 + else: # line 235 + return (sys.getdefaultencoding(), b"\n", []) # line 235 + if ignoreWhitespace: # line 236 + lines[:] = [line.replace("\t", " ").strip() for line in lines] # line 236 + return (encoding, eol, lines) # line 237 + +if TYPE_CHECKING: # line 239 + DataType = TypeVar("DataType", BranchInfo, ChangeSet, MergeBlock, PathInfo) # line 240 + @_coconut_tco # line 241 + def dataCopy(_tipe: 'Type[DataType]', _old: 'DataType', *_args, byValue: 'bool'=False, **_kwargs) -> 'DataType': # line 241 + ''' A better makedata() version. 
''' # line 242 + r = _old._asdict() # type: Dict[str, Any] # line 243 + r.update({k: ([e for e in v] if byValue and isinstance(v, (list, tuple, set)) else v) for k, v in _kwargs.items()}) # copy by value if required # line 244 + return _coconut_tail_call(makedata, _tipe, *(list(_args) + [r[field] for field in _old._fields])) # TODO also offer copy-by-value here # line 245 +else: # line 246 + @_coconut_tco # line 247 + def dataCopy(_tipe, _old, *_args, byValue=False, **_kwargs) -> 'DataType': # line 247 + ''' A better makedata() version. ''' # line 248 + r = _old._asdict() # line 249 + r.update({k: ([e for e in v] if byValue and isinstance(v, (list, tuple, set)) else v) for k, v in _kwargs.items()}) # copy by value if required # line 250 + return _coconut_tail_call(makedata, _tipe, *(list(_args) + [r[field] for field in _old._fields])) # TODO also offer copy-by-value here # line 251 + +def detectMoves(changes: 'ChangeSet') -> 'Dict[str, Tuple[str, PathInfo]]': # line 253 + ''' Compute renames/removes for a changeset. ''' # line 254 + moves = {} # type: Dict[str, Tuple[str, PathInfo]] # line 255 + for path, info in changes.additions.items(): # line 256 + for dpath, dinfo in changes.deletions.items(): # line 256 + if info.size == dinfo.size and info.mtime == dinfo.mtime and info.hash == dinfo.hash: # was moved TODO check either mtime or hash? # line 257 + moves[path] = (dpath, info) # store new data and original name, but don't remove add/del # line 258 + break # deletions loop, continue with next addition # line 259 + return moves # line 260 + +def user_input(text: 'str', choices: 'Iterable[str]', default: 'str'=None, selection: 'str'="") -> 'str': # line 262 + ''' Default can be a selection from choice and allows empty input. 
''' # line 263 + while True: # line 264 + selection = input(text).strip().lower() # line 265 + if selection != "" and selection in choices: # line 266 break # line 266 - return selection # line 267 - -def user_block_input(output: 'List[str]'): # line 269 - ''' Side-effect appending to input list. ''' # line 270 - sep = input("Enter end-of-text marker (default: : ") # type: str # line 271 - line = sep # type: str # line 271 - while True: # line 272 - line = input("> ") # line 273 - if line == sep: # line 274 - break # line 274 - output.append(line) # writes to caller-provided list reference # line 275 - -def merge(file: '_coconut.typing.Optional[bytes]'=None, into: '_coconut.typing.Optional[bytes]'=None, filename: '_coconut.typing.Optional[str]'=None, intoname: '_coconut.typing.Optional[str]'=None, mergeOperation: 'MergeOperation'=MergeOperation.BOTH, charMergeOperation: 'MergeOperation'=MergeOperation.BOTH, diffOnly: 'bool'=False, eol: 'bool'=False, ignoreWhitespace: 'bool'=False) -> 'Tuple[Union[bytes, List[MergeBlock]], _coconut.typing.Optional[bytes]]': # line 277 + if selection == "" and default is not None: # line 267 + selection = default # line 267 + break # line 267 + return selection # line 268 + +def user_block_input(output: 'List[str]'): # line 270 + ''' Side-effect appending to input list. 
''' # line 271 + sep = input("Enter end-of-text marker (default: : ") # type: str # line 272 + line = sep # type: str # line 272 + while True: # line 273 + line = input("> ") # line 274 + if line == sep: # line 275 + break # line 275 + output.append(line) # writes to caller-provided list reference # line 276 + +def merge(file: '_coconut.typing.Optional[bytes]'=None, into: '_coconut.typing.Optional[bytes]'=None, filename: '_coconut.typing.Optional[str]'=None, intoname: '_coconut.typing.Optional[str]'=None, mergeOperation: 'MergeOperation'=MergeOperation.BOTH, charMergeOperation: 'MergeOperation'=MergeOperation.BOTH, diffOnly: 'bool'=False, eol: 'bool'=False, ignoreWhitespace: 'bool'=False) -> 'Tuple[Union[bytes, List[MergeBlock]], _coconut.typing.Optional[bytes]]': # line 278 ''' Merges other binary text contents 'file' (or reads file 'filename') into current text contents 'into' (or reads file 'intoname'), returning merged result. For update, the other version is assumed to be the "new/added" one, while for diff, the current changes are the ones "added". 
However, change direction markers are insert ("+") for elements only in into, and remove ("-") for elements only in other file (just like the diff marks +/-) diffOnly returns detected change blocks only, no text merging eol flag will use the other file's EOL marks in case of replace block and INSERT strategy, the change will be added **behind** the original - ''' # line 292 - encoding = None # type: str # line 293 - othr = None # type: _coconut.typing.Sequence[str] # line 293 - othreol = None # type: _coconut.typing.Optional[bytes] # line 293 - curr = None # type: _coconut.typing.Sequence[str] # line 293 - curreol = None # type: _coconut.typing.Optional[bytes] # line 293 - try: # load files line-wise and normalize line endings (keep the one of the current file) TODO document # line 294 - encoding, othreol, othr = detectAndLoad(filename=filename, content=file, ignoreWhitespace=ignoreWhitespace) # line 295 - encoding, curreol, curr = detectAndLoad(filename=intoname, content=into, ignoreWhitespace=ignoreWhitespace) # line 296 - except Exception as E: # line 297 - Exit("Cannot merge '%s' into '%s': %r" % (filename, intoname, E)) # line 297 - if None not in [othreol, curreol] and othreol != curreol: # line 298 - warn("Differing EOL-styles detected during merge. 
Using current file's style for merged output") # line 298 - output = list(difflib.Differ().compare(othr, curr)) # type: List[str] # from generator expression # line 299 - blocks = [] # type: List[MergeBlock] # merged result in blocks # line 300 - tmp = [] # type: List[str] # block lines # line 301 - last = " " # type: str # "into"-file offset for remark lines # line 302 - no = None # type: int # "into"-file offset for remark lines # line 302 - line = None # type: str # "into"-file offset for remark lines # line 302 - offset = 0 # type: int # "into"-file offset for remark lines # line 302 - for no, line in enumerate(output + ["X"]): # EOF marker (difflib's output will never be "X" alone) # line 303 - if line[0] == last: # continue filling current block, no matter what type of block it is # line 304 - tmp.append(line[2:]) # continue filling current block, no matter what type of block it is # line 304 - continue # continue filling current block, no matter what type of block it is # line 304 - if line == "X" and len(tmp) == 0: # break if nothing left to do, otherwise perform operation for stored block # line 305 - break # break if nothing left to do, otherwise perform operation for stored block # line 305 - if last == " ": # block is same in both files # line 306 - if len(tmp) > 0: # avoid adding empty keep block # line 307 - blocks.append(MergeBlock(MergeBlockType.KEEP, [line for line in tmp], line=no - offset - len(tmp))) # avoid adding empty keep block # line 307 - elif last == "-": # may be a pure deletion or part of a replacement (with next block being "+") # line 308 - blocks.append(MergeBlock(MergeBlockType.REMOVE, [line for line in tmp], line=no - offset - len(tmp))) # line 309 - if len(blocks) >= 2 and blocks[-2].tipe == MergeBlockType.INSERT: # line 310 - offset += len(blocks[-2].lines) # line 311 - blocks[-2] = dataCopy(MergeBlock, blocks[-1], tipe=MergeBlockType.REPLACE, replaces=dataCopy(MergeBlock, blocks[-2], line=blocks[-1].line)) # remember replaced 
stuff with reference to other merge block TODO why -1 necessary? # line 312 - blocks.pop() # line 313 - elif last == "+": # may be insertion or replacement (with previous - block) # line 314 - blocks.append(MergeBlock(MergeBlockType.INSERT, [line for line in tmp], line=no - offset - len(tmp))) # first, assume simple insertion, then check for replacement # line 315 - if len(blocks) >= 2 and blocks[-2].tipe == MergeBlockType.REMOVE: # and len(blocks[-1].lines) == len(blocks[-2].lines): # requires previous block and same number of lines TODO allow multiple intra-line merge for same-length blocks # line 316 - offset += len(blocks[-1].lines) # line 317 - blocks[-2] = dataCopy(MergeBlock, blocks[-2], tipe=MergeBlockType.REPLACE, replaces=dataCopy(MergeBlock, blocks[-1], line=blocks[-2].line)) # remember replaced stuff with reference to other merge block TODO why -1 necessary? # line 318 - blocks.pop() # remove TOS due to merging two blocks into replace or modify # line 319 - elif last == "?": # marker for intra-line change comment HINT was earlier part of the MergeBlock # line 320 - offset += 1 # marker for intra-line change comment HINT was earlier part of the MergeBlock # line 320 - last = line[0] # line 321 - tmp[:] = [line[2:]] # only keep current line for next block # line 322 + ''' # line 293 + encoding = None # type: str # line 294 + othr = None # type: _coconut.typing.Sequence[str] # line 294 + othreol = None # type: _coconut.typing.Optional[bytes] # line 294 + curr = None # type: _coconut.typing.Sequence[str] # line 294 + curreol = None # type: _coconut.typing.Optional[bytes] # line 294 + try: # load files line-wise and normalize line endings (keep the one of the current file) TODO document # line 295 + encoding, othreol, othr = detectAndLoad(filename=filename, content=file, ignoreWhitespace=ignoreWhitespace) # line 296 + encoding, curreol, curr = detectAndLoad(filename=intoname, content=into, ignoreWhitespace=ignoreWhitespace) # line 297 + except Exception as 
E: # line 298 + Exit("Cannot merge '%s' into '%s': %r" % (filename, intoname, E)) # line 298 + if None not in [othreol, curreol] and othreol != curreol: # line 299 + warn("Differing EOL-styles detected during merge. Using current file's style for merged output") # line 299 + output = list(difflib.Differ().compare(othr, curr)) # type: List[str] # from generator expression # line 300 + blocks = [] # type: List[MergeBlock] # merged result in blocks # line 301 + tmp = [] # type: List[str] # block lines # line 302 + last = " " # type: str # "into"-file offset for remark lines # line 303 + no = None # type: int # "into"-file offset for remark lines # line 303 + line = None # type: str # "into"-file offset for remark lines # line 303 + offset = 0 # type: int # "into"-file offset for remark lines # line 303 + for no, line in enumerate(output + ["X"]): # EOF marker (difflib's output will never be "X" alone) # line 304 + if line[0] == last: # continue filling current block, no matter what type of block it is # line 305 + tmp.append(line[2:]) # continue filling current block, no matter what type of block it is # line 305 + continue # continue filling current block, no matter what type of block it is # line 305 + if line == "X" and len(tmp) == 0: # break if nothing left to do, otherwise perform operation for stored block # line 306 + break # break if nothing left to do, otherwise perform operation for stored block # line 306 + if last == " ": # block is same in both files # line 307 + if len(tmp) > 0: # avoid adding empty keep block # line 308 + blocks.append(MergeBlock(MergeBlockType.KEEP, [line for line in tmp], line=no - offset - len(tmp))) # avoid adding empty keep block # line 308 + elif last == "-": # may be a pure deletion or part of a replacement (with next block being "+") # line 309 + blocks.append(MergeBlock(MergeBlockType.REMOVE, [line for line in tmp], line=no - offset - len(tmp))) # line 310 + if len(blocks) >= 2 and blocks[-2].tipe == MergeBlockType.INSERT: # 
line 311 + offset += len(blocks[-2].lines) # line 312 + blocks[-2] = dataCopy(MergeBlock, blocks[-1], tipe=MergeBlockType.REPLACE, replaces=dataCopy(MergeBlock, blocks[-2], line=blocks[-1].line)) # remember replaced stuff with reference to other merge block TODO why -1 necessary? # line 313 + blocks.pop() # line 314 + elif last == "+": # may be insertion or replacement (with previous - block) # line 315 + blocks.append(MergeBlock(MergeBlockType.INSERT, [line for line in tmp], line=no - offset - len(tmp))) # first, assume simple insertion, then check for replacement # line 316 + if len(blocks) >= 2 and blocks[-2].tipe == MergeBlockType.REMOVE: # and len(blocks[-1].lines) == len(blocks[-2].lines): # requires previous block and same number of lines TODO allow multiple intra-line merge for same-length blocks # line 317 + offset += len(blocks[-1].lines) # line 318 + blocks[-2] = dataCopy(MergeBlock, blocks[-2], tipe=MergeBlockType.REPLACE, replaces=dataCopy(MergeBlock, blocks[-1], line=blocks[-2].line)) # remember replaced stuff with reference to other merge block TODO why -1 necessary? 
# line 319 + blocks.pop() # remove TOS due to merging two blocks into replace or modify # line 320 + elif last == "?": # marker for intra-line change comment HINT was earlier part of the MergeBlock # line 321 + offset += 1 # marker for intra-line change comment HINT was earlier part of the MergeBlock # line 321 + last = line[0] # line 322 + tmp[:] = [line[2:]] # only keep current line for next block # line 323 # TODO add code to detect block moved blocks here - nl = othreol if eol else ((othreol if curreol is None else curreol)) # type: bytes # no default newline, to mark "no newline" # line 324 - debug("Diff blocks: " + repr(blocks)) # line 325 - if diffOnly: # line 326 - return (blocks, nl) # line 326 + nl = othreol if eol else ((othreol if curreol is None else curreol)) # type: bytes # no default newline, to mark "no newline" # line 325 + debug("Diff blocks: " + repr(blocks)) # line 326 + if diffOnly: # line 327 + return (blocks, nl) # line 327 # now perform merge operations depending on detected blocks - output[:] = [] # clean list of strings # line 329 - add_all = None # type: _coconut.typing.Optional[str] # clean list of strings # line 329 - del_all = None # type: _coconut.typing.Optional[str] # clean list of strings # line 329 - selection = None # type: str # clean list of strings # line 329 - for block in blocks: # line 330 - if block.tipe == MergeBlockType.KEEP: # line 331 - output.extend(block.lines) # line 331 - elif (block.tipe == MergeBlockType.INSERT and not (mergeOperation.value & MergeOperation.REMOVE.value)) or (block.tipe == MergeBlockType.REMOVE and (mergeOperation.value & MergeOperation.INSERT.value)): # will add line despite remove if --add-line was selected # line 332 - output.extend(block.lines) # line 334 - elif block.tipe == MergeBlockType.REPLACE: # complete block replacement # line 335 - if len(block.lines) == len(block.replaces.lines) == 1: # one-liner # line 336 - output.append(lineMerge(block.lines[0], block.replaces.lines[0], 
mergeOperation=charMergeOperation)) # line 337 - elif mergeOperation == MergeOperation.ASK: # more than one line: needs user input # line 338 - printo(pure.ajoin("- ", block.lines, nl="\n")) # TODO check +/- in update mode, could be swapped # line 339 - printo(pure.ajoin("+ ", block.replaces.lines, nl="\n")) # line 340 - while True: # line 341 - op = input(" Line replacement: *M[I]ne (+), [T]heirs (-), [B]oth, [U]ser input: ").strip().lower()[:1] # type: str # line 342 - if op in "tb": # line 343 - output.extend(block.lines) # line 343 - if op in "ib": # line 344 - output.extend(block.replaces.lines) # line 344 - if op == "u": # line 345 - user_block_input(output) # line 345 - if op in "tbiu": # line 346 - break # line 346 - else: # more than one line and not ask # line 347 - if mergeOperation == MergeOperation.REMOVE: # line 348 - pass # line 348 - elif mergeOperation == MergeOperation.BOTH: # line 349 - output.extend(block.lines) # line 349 - elif mergeOperation == MergeOperation.INSERT: # TODO optionally allow insertion BEFORE or AFTER original (order of these both lines) # line 350 - output.extend(list(block.replaces.lines) + list(block.lines)) # TODO optionally allow insertion BEFORE or AFTER original (order of these both lines) # line 350 - elif block.tipe in (MergeBlockType.INSERT, MergeBlockType.REMOVE) and mergeOperation == MergeOperation.ASK: # user - interactive insert/remove section # line 351 - if (block.tipe == MergeBlockType.INSERT and add_all is None) or (block.tipe == MergeOperation.REMOVE and del_all is None): # condition for asking # line 352 - selection = user_input(pure.ajoin("+ " if block.tipe == MergeBlockType.INSERT else "- ", block.lines) + "\n Accept? 
*[Y]es, [N]o, yes to [A]ll %s, n[O] to all: " % "insertions" if block.tipe == MergeBlockType.INSERT else "deletions", "ynao", "y") # line 354 - if selection in "ao": # line 355 - if block.tipe == MergeBlockType.INSERT: # line 356 - add_all = "y" if selection == "a" else "n" # line 356 - selection = add_all # line 356 - else: # REMOVE case # line 357 - del_all = "y" if selection == "a" else "n" # REMOVE case # line 357 - selection = del_all # REMOVE case # line 357 - if (block.tipe == MergeBlockType.INSERT and "y" in (add_all, selection)) or ("n" in (del_all, selection)): # REMOVE case # line 358 - output.extend(block.lines) # line 360 - debug("Merge output: " + "; ".join(output)) # line 361 - return (((b"\n" if nl is None else nl)).join([line.encode(encoding) for line in output]), nl) # returning bytes # line 362 + output[:] = [] # clean list of strings # line 330 + add_all = None # type: _coconut.typing.Optional[str] # clean list of strings # line 330 + del_all = None # type: _coconut.typing.Optional[str] # clean list of strings # line 330 + selection = None # type: str # clean list of strings # line 330 + for block in blocks: # line 331 + if block.tipe == MergeBlockType.KEEP: # line 332 + output.extend(block.lines) # line 332 + elif (block.tipe == MergeBlockType.INSERT and not (mergeOperation.value & MergeOperation.REMOVE.value)) or (block.tipe == MergeBlockType.REMOVE and (mergeOperation.value & MergeOperation.INSERT.value)): # will add line despite remove if --add-line was selected # line 333 + output.extend(block.lines) # line 335 + elif block.tipe == MergeBlockType.REPLACE: # complete block replacement # line 336 + if len(block.lines) == len(block.replaces.lines) == 1: # one-liner # line 337 + output.append(lineMerge(block.lines[0], block.replaces.lines[0], mergeOperation=charMergeOperation)) # line 338 + elif mergeOperation == MergeOperation.ASK: # more than one line: needs user input # line 339 + printo(pure.ajoin("- ", block.lines, nl="\n")) # TODO check 
+/- in update mode, could be swapped # line 340 + printo(pure.ajoin("+ ", block.replaces.lines, nl="\n")) # line 341 + while True: # line 342 + op = input(" Line replacement: *M[I]ne (+), [T]heirs (-), [B]oth, [U]ser input: ").strip().lower()[:1] # type: str # line 343 + if op in "tb": # line 344 + output.extend(block.lines) # line 344 + if op in "ib": # line 345 + output.extend(block.replaces.lines) # line 345 + if op == "u": # line 346 + user_block_input(output) # line 346 + if op in "tbiu": # line 347 + break # line 347 + else: # more than one line and not ask # line 348 + if mergeOperation == MergeOperation.REMOVE: # line 349 + pass # line 349 + elif mergeOperation == MergeOperation.BOTH: # line 350 + output.extend(block.lines) # line 350 + elif mergeOperation == MergeOperation.INSERT: # TODO optionally allow insertion BEFORE or AFTER original (order of these both lines) # line 351 + output.extend(list(block.replaces.lines) + list(block.lines)) # TODO optionally allow insertion BEFORE or AFTER original (order of these both lines) # line 351 + elif block.tipe in (MergeBlockType.INSERT, MergeBlockType.REMOVE) and mergeOperation == MergeOperation.ASK: # user - interactive insert/remove section # line 352 + if (block.tipe == MergeBlockType.INSERT and add_all is None) or (block.tipe == MergeOperation.REMOVE and del_all is None): # condition for asking # line 353 + selection = user_input(pure.ajoin("+ " if block.tipe == MergeBlockType.INSERT else "- ", block.lines) + "\n Accept? 
*[Y]es, [N]o, yes to [A]ll %s, n[O] to all: " % "insertions" if block.tipe == MergeBlockType.INSERT else "deletions", "ynao", "y") # line 355 + if selection in "ao": # line 356 + if block.tipe == MergeBlockType.INSERT: # line 357 + add_all = "y" if selection == "a" else "n" # line 357 + selection = add_all # line 357 + else: # REMOVE case # line 358 + del_all = "y" if selection == "a" else "n" # REMOVE case # line 358 + selection = del_all # REMOVE case # line 358 + if (block.tipe == MergeBlockType.INSERT and "y" in (add_all, selection)) or ("n" in (del_all, selection)): # REMOVE case # line 359 + output.extend(block.lines) # line 361 + debug("Merge output: " + "; ".join(output)) # line 362 + return (((b"\n" if nl is None else nl)).join([line.encode(encoding) for line in output]), nl) # returning bytes # line 363 # TODO handle check for more/less lines in found -/+ blocks to find common section and splitting prefix/suffix out -@_coconut_tco # line 365 -def lineMerge(othr: 'str', into: 'str', mergeOperation: 'MergeOperation'=MergeOperation.BOTH, diffOnly: 'bool'=False) -> 'Union[str, List[MergeBlock]]': # line 365 +@_coconut_tco # line 366 +def lineMerge(othr: 'str', into: 'str', mergeOperation: 'MergeOperation'=MergeOperation.BOTH, diffOnly: 'bool'=False) -> 'Union[str, List[MergeBlock]]': # line 366 ''' Merges string 'othr' into current string 'into'. 
change direction mark is insert for elements only in into, and remove for elements only in file (according to diff marks +/-) - ''' # line 368 - out = list(difflib.Differ().compare(othr, into)) # type: List[str] # line 369 - blocks = [] # type: List[MergeBlock] # line 370 - for i, line in enumerate(out): # line 371 - if line[0] == "+": # line 372 - if i + 1 < len(out) and out[i + 1][0] == "+": # block will continue # line 373 - if i > 0 and blocks[-1].tipe == MergeBlockType.INSERT: # middle of + block # line 374 - blocks[-1].lines.append(line[2]) # add one more character to the accumulating list # line 375 - else: # first + in block # line 376 - blocks.append(MergeBlock(MergeBlockType.INSERT, [line[2]], i)) # line 377 - else: # last line of + block # line 378 - if i > 0 and blocks[-1].tipe == MergeBlockType.INSERT: # end of a block # line 379 - blocks[-1].lines.append(line[2]) # line 380 - else: # single line # line 381 - blocks.append(MergeBlock(MergeBlockType.INSERT, [line[2]], i)) # line 382 - if i >= 1 and blocks[-2].tipe == MergeBlockType.REMOVE: # previous - and now last in + block creates a replacement block # line 383 - blocks[-2] = MergeBlock(MergeBlockType.REPLACE, blocks[-2].lines, i, replaces=blocks[-1]) # line 384 - blocks.pop() # line 384 - elif line[0] == "-": # line 385 - if i > 0 and blocks[-1].tipe == MergeBlockType.REMOVE: # part of - block # line 386 - blocks[-1].lines.append(line[2]) # line 387 - else: # first in block # line 388 - blocks.append(MergeBlock(MergeBlockType.REMOVE, [line[2]], i)) # line 389 - elif line[0] == " ": # line 390 - if i > 0 and blocks[-1].tipe == MergeBlockType.KEEP: # part of block # line 391 - blocks[-1].lines.append(line[2]) # line 392 - else: # first in block # line 393 - blocks.append(MergeBlock(MergeBlockType.KEEP, [line[2]], i)) # line 394 - else: # line 395 - raise Exception("Cannot parse diff line %r" % line) # line 395 - blocks[:] = [dataCopy(MergeBlock, block, lines=["".join(block.lines)], 
replaces=dataCopy(MergeBlock, block.replaces, lines=["".join(block.replaces.lines)]) if block.replaces else None) for block in blocks] # line 396 - if diffOnly: # line 397 - return blocks # line 397 - out[:] = [] # line 398 - for i, block in enumerate(blocks): # line 399 - if block.tipe == MergeBlockType.KEEP: # line 400 - out.extend(block.lines) # line 400 - elif block.tipe == MergeBlockType.REPLACE: # line 401 - if mergeOperation == MergeOperation.ASK: # line 402 - printo(pure.ajoin("- ", othr)) # line 403 - printo("- " + (" " * i) + block.replaces.lines[0]) # line 404 - printo("+ " + (" " * i) + block.lines[0]) # line 405 - printo(pure.ajoin("+ ", into)) # line 406 - op = user_input(" Character replacement: *M[I]ne (+), [T]heirs (-), [B]oth, [U]ser input: ", "tbim") # type: str # line 407 - if op in "tb": # line 408 - out.extend(block.lines) # line 408 - break # line 408 - if op in "ib": # line 409 - out.extend(block.replaces.lines) # line 409 + ''' # line 369 + out = list(difflib.Differ().compare(othr, into)) # type: List[str] # line 370 + blocks = [] # type: List[MergeBlock] # line 371 + for i, line in enumerate(out): # line 372 + if line[0] == "+": # line 373 + if i + 1 < len(out) and out[i + 1][0] == "+": # block will continue # line 374 + if i > 0 and blocks[-1].tipe == MergeBlockType.INSERT: # middle of + block # line 375 + blocks[-1].lines.append(line[2]) # add one more character to the accumulating list # line 376 + else: # first + in block # line 377 + blocks.append(MergeBlock(MergeBlockType.INSERT, [line[2]], i)) # line 378 + else: # last line of + block # line 379 + if i > 0 and blocks[-1].tipe == MergeBlockType.INSERT: # end of a block # line 380 + blocks[-1].lines.append(line[2]) # line 381 + else: # single line # line 382 + blocks.append(MergeBlock(MergeBlockType.INSERT, [line[2]], i)) # line 383 + if i >= 1 and blocks[-2].tipe == MergeBlockType.REMOVE: # previous - and now last in + block creates a replacement block # line 384 + blocks[-2] = 
MergeBlock(MergeBlockType.REPLACE, blocks[-2].lines, i, replaces=blocks[-1]) # line 385 + blocks.pop() # line 385 + elif line[0] == "-": # line 386 + if i > 0 and blocks[-1].tipe == MergeBlockType.REMOVE: # part of - block # line 387 + blocks[-1].lines.append(line[2]) # line 388 + else: # first in block # line 389 + blocks.append(MergeBlock(MergeBlockType.REMOVE, [line[2]], i)) # line 390 + elif line[0] == " ": # line 391 + if i > 0 and blocks[-1].tipe == MergeBlockType.KEEP: # part of block # line 392 + blocks[-1].lines.append(line[2]) # line 393 + else: # first in block # line 394 + blocks.append(MergeBlock(MergeBlockType.KEEP, [line[2]], i)) # line 395 + else: # line 396 + raise Exception("Cannot parse diff line %r" % line) # line 396 + blocks[:] = [dataCopy(MergeBlock, block, lines=["".join(block.lines)], replaces=dataCopy(MergeBlock, block.replaces, lines=["".join(block.replaces.lines)]) if block.replaces else None) for block in blocks] # line 397 + if diffOnly: # line 398 + return blocks # line 398 + out[:] = [] # line 399 + for i, block in enumerate(blocks): # line 400 + if block.tipe == MergeBlockType.KEEP: # line 401 + out.extend(block.lines) # line 401 + elif block.tipe == MergeBlockType.REPLACE: # line 402 + if mergeOperation == MergeOperation.ASK: # line 403 + printo(pure.ajoin("- ", othr)) # line 404 + printo("- " + (" " * i) + block.replaces.lines[0]) # line 405 + printo("+ " + (" " * i) + block.lines[0]) # line 406 + printo(pure.ajoin("+ ", into)) # line 407 + op = user_input(" Character replacement: *M[I]ne (+), [T]heirs (-), [B]oth, [U]ser input: ", "tbim") # type: str # line 408 + if op in "tb": # line 409 + out.extend(block.lines) # line 409 break # line 409 - if op == "m": # line 410 - user_block_input(out) # line 410 + if op in "ib": # line 410 + out.extend(block.replaces.lines) # line 410 break # line 410 - else: # non-interactive # line 411 - if mergeOperation == MergeOperation.REMOVE: # line 412 - pass # line 412 - elif mergeOperation == 
MergeOperation.BOTH: # line 413 - out.extend(block.lines) # line 413 - elif mergeOperation == MergeOperation.INSERT: # line 414 - out.extend(list(block.replaces.lines) + list(block.lines)) # line 414 - elif block.tipe == MergeBlockType.INSERT and not (mergeOperation.value & MergeOperation.REMOVE.value): # line 415 - out.extend(block.lines) # line 415 - elif block.tipe == MergeBlockType.REMOVE and mergeOperation.value & MergeOperation.INSERT.value: # line 416 + if op == "m": # line 411 + user_block_input(out) # line 411 + break # line 411 + else: # non-interactive # line 412 + if mergeOperation == MergeOperation.REMOVE: # line 413 + pass # line 413 + elif mergeOperation == MergeOperation.BOTH: # line 414 + out.extend(block.lines) # line 414 + elif mergeOperation == MergeOperation.INSERT: # line 415 + out.extend(list(block.replaces.lines) + list(block.lines)) # line 415 + elif block.tipe == MergeBlockType.INSERT and not (mergeOperation.value & MergeOperation.REMOVE.value): # line 416 out.extend(block.lines) # line 416 + elif block.tipe == MergeBlockType.REMOVE and mergeOperation.value & MergeOperation.INSERT.value: # line 417 + out.extend(block.lines) # line 417 # TODO ask for insert or remove as well - return _coconut_tail_call("".join, out) # line 418 + return _coconut_tail_call("".join, out) # line 419 -def findSosVcsBase() -> 'Tuple[_coconut.typing.Optional[str], _coconut.typing.Optional[str], _coconut.typing.Optional[str]]': # line 420 +def findSosVcsBase() -> 'Tuple[_coconut.typing.Optional[str], _coconut.typing.Optional[str], _coconut.typing.Optional[str]]': # line 421 ''' Attempts to find sos and legacy VCS base folders. 
Returns (SOS-repo root, VCS-repo root, VCS command) - ''' # line 423 - debug("Detecting root folders...") # line 424 - path = os.getcwd() # type: str # start in current folder, check parent until found or stopped # line 425 - vcs = (None, None) # type: Tuple[_coconut.typing.Optional[str], _coconut.typing.Optional[str]] # line 426 - while not os.path.exists(encode(os.path.join(path, metaFolder))): # line 427 - contents = set(os.listdir(path)) # type: Set[str] # line 428 - vcss = [executable for folder, executable in vcsFolders.items() if folder in contents or (SLASH in folder and os.path.exists(os.path.join(os.getcwd(), folder.replace(SLASH, os.sep))))] # type: _coconut.typing.Sequence[str] # determine VCS type from existence of dot folder # line 429 - choice = None # type: _coconut.typing.Optional[str] # line 430 - if len(vcss) > 1: # line 431 - choice = SVN if SVN in vcss else vcss[0] # SVN is preferred # line 432 - warn("Detected more than one parallel VCS checkouts %r. Falling back to '%s'" % (vcss, choice)) # line 433 - elif len(vcss) > 0: # line 434 - choice = vcss[0] # line 434 - if not vcs[0] and choice: # memorize current repo root # line 435 - vcs = (path, choice) # memorize current repo root # line 435 - new = os.path.dirname(path) # get parent path # line 436 - if new == path: # avoid infinite loop # line 437 - break # avoid infinite loop # line 437 - path = new # line 438 - if os.path.exists(encode(os.path.join(path, metaFolder))): # found something # line 439 - if vcs[0]: # already detected vcs base and command # line 440 - return (path, vcs[0], vcs[1]) # already detected vcs base and command # line 440 - sos = path # line 441 - while True: # continue search for VCS base # line 442 - contents = set(os.listdir(path)) # line 443 - vcss = [executable for folder, executable in vcsFolders.items() if folder in contents] # determine VCS type # line 444 - choice = None # line 445 - if len(vcss) > 1: # line 446 - choice = SVN if SVN in vcss else vcss[0] # line 
447 - warn("Detected more than one parallel VCS checkouts %r. Falling back to '%s'" % (vcss, choice)) # line 448 - elif len(vcss) > 0: # line 449 - choice = vcss[0] # line 449 - if choice: # line 450 - return (sos, path, choice) # line 450 - new = os.path.dirname(path) # get parent path # line 451 - if new == path: # no VCS folder found # line 452 - return (sos, None, None) # no VCS folder found # line 452 - path = new # line 453 - return (None, vcs[0], vcs[1]) # line 454 - -def tokenizeGlobPattern(pattern: 'str') -> 'List[GlobBlock]': # line 456 - index = 0 # type: int # line 457 - out = [] # type: List[GlobBlock] # literal = True, first index # line 458 - while index < len(pattern): # line 459 - if pattern[index:index + 3] in ("[?]", "[*]", "[[]", "[]]"): # line 460 - out.append(GlobBlock(False, pattern[index:index + 3], index)) # line 460 - continue # line 460 - if pattern[index] in "*?": # line 461 - count = 1 # type: int # line 462 - while index + count < len(pattern) and pattern[index] == "?" 
and pattern[index + count] == "?": # line 463 - count += 1 # line 463 - out.append(GlobBlock(False, pattern[index:index + count], index)) # line 464 - index += count # line 464 - continue # line 464 - if pattern[index:index + 2] == "[!": # line 465 - out.append(GlobBlock(False, pattern[index:pattern.index("]", index + 2) + 1], index)) # line 465 - index += len(out[-1][1]) # line 465 + ''' # line 424 + debug("Detecting root folders...") # line 425 + path = os.getcwd() # type: str # start in current folder, check parent until found or stopped # line 426 + vcs = (None, None) # type: Tuple[_coconut.typing.Optional[str], _coconut.typing.Optional[str]] # line 427 + while not os.path.exists(encode(os.path.join(path, metaFolder))): # line 428 + contents = set(os.listdir(path)) # type: Set[str] # line 429 + vcss = [executable for folder, executable in vcsFolders.items() if folder in contents or (SLASH in folder and os.path.exists(os.path.join(os.getcwd(), folder.replace(SLASH, os.sep))))] # type: _coconut.typing.Sequence[str] # determine VCS type from existence of dot folder # line 430 + choice = None # type: _coconut.typing.Optional[str] # line 431 + if len(vcss) > 1: # line 432 + choice = SVN if SVN in vcss else vcss[0] # SVN is preferred # line 433 + warn("Detected more than one parallel VCS checkouts %r. 
Falling back to '%s'" % (vcss, choice)) # line 434 + elif len(vcss) > 0: # line 435 + choice = vcss[0] # line 435 + if not vcs[0] and choice: # memorize current repo root # line 436 + vcs = (path, choice) # memorize current repo root # line 436 + new = os.path.dirname(path) # get parent path # line 437 + if new == path: # avoid infinite loop # line 438 + break # avoid infinite loop # line 438 + path = new # line 439 + if os.path.exists(encode(os.path.join(path, metaFolder))): # found something # line 440 + if vcs[0]: # already detected vcs base and command # line 441 + return (path, vcs[0], vcs[1]) # already detected vcs base and command # line 441 + sos = path # line 442 + while True: # continue search for VCS base # line 443 + contents = set(os.listdir(path)) # line 444 + vcss = [executable for folder, executable in vcsFolders.items() if folder in contents] # determine VCS type # line 445 + choice = None # line 446 + if len(vcss) > 1: # line 447 + choice = SVN if SVN in vcss else vcss[0] # line 448 + warn("Detected more than one parallel VCS checkouts %r. 
Falling back to '%s'" % (vcss, choice)) # line 449 + elif len(vcss) > 0: # line 450 + choice = vcss[0] # line 450 + if choice: # line 451 + return (sos, path, choice) # line 451 + new = os.path.dirname(path) # get parent path # line 452 + if new == path: # no VCS folder found # line 453 + return (sos, None, None) # no VCS folder found # line 453 + path = new # line 454 + return (None, vcs[0], vcs[1]) # line 455 + +def tokenizeGlobPattern(pattern: 'str') -> 'List[GlobBlock]': # line 457 + index = 0 # type: int # line 458 + out = [] # type: List[GlobBlock] # literal = True, first index # line 459 + while index < len(pattern): # line 460 + if pattern[index:index + 3] in ("[?]", "[*]", "[[]", "[]]"): # line 461 + out.append(GlobBlock(False, pattern[index:index + 3], index)) # line 461 + continue # line 461 + if pattern[index] in "*?": # line 462 + count = 1 # type: int # line 463 + while index + count < len(pattern) and pattern[index] == "?" and pattern[index + count] == "?": # line 464 + count += 1 # line 464 + out.append(GlobBlock(False, pattern[index:index + count], index)) # line 465 + index += count # line 465 continue # line 465 - count = 1 # line 466 - while index + count < len(pattern) and pattern[index + count] not in "*?[": # line 467 - count += 1 # line 467 - out.append(GlobBlock(True, pattern[index:index + count], index)) # line 468 - index += count # line 468 - return out # line 469 - -def tokenizeGlobPatterns(oldPattern: 'str', newPattern: 'str') -> 'Tuple[_coconut.typing.Sequence[GlobBlock], _coconut.typing.Sequence[GlobBlock]]': # line 471 - ot = tokenizeGlobPattern(oldPattern) # type: List[GlobBlock] # line 472 - nt = tokenizeGlobPattern(newPattern) # type: List[GlobBlock] # line 473 + if pattern[index:index + 2] == "[!": # line 466 + out.append(GlobBlock(False, pattern[index:pattern.index("]", index + 2) + 1], index)) # line 466 + index += len(out[-1][1]) # line 466 + continue # line 466 + count = 1 # line 467 + while index + count < len(pattern) and 
pattern[index + count] not in "*?[": # line 468 + count += 1 # line 468 + out.append(GlobBlock(True, pattern[index:index + count], index)) # line 469 + index += count # line 469 + return out # line 470 + +def tokenizeGlobPatterns(oldPattern: 'str', newPattern: 'str') -> 'Tuple[_coconut.typing.Sequence[GlobBlock], _coconut.typing.Sequence[GlobBlock]]': # line 472 + ot = tokenizeGlobPattern(oldPattern) # type: List[GlobBlock] # line 473 + nt = tokenizeGlobPattern(newPattern) # type: List[GlobBlock] # line 474 # if len(ot) != len(nt): Exit("Source and target patterns can't be translated due to differing number of parsed glob markers and literal strings") - if len([o for o in ot if not o.isLiteral]) < len([n for n in nt if not n.isLiteral]): # line 475 - Exit("Source and target file patterns contain differing number of glob markers and can't be translated") # line 475 - if any((O.content != N.content for O, N in zip([o for o in ot if not o.isLiteral], [n for n in nt if not n.isLiteral]))): # line 476 - Exit("Source and target file patterns differ in semantics") # line 476 - return (ot, nt) # line 477 - -def convertGlobFiles(filenames: '_coconut.typing.Sequence[str]', oldPattern: '_coconut.typing.Sequence[GlobBlock]', newPattern: '_coconut.typing.Sequence[GlobBlock]') -> '_coconut.typing.Sequence[Tuple[str, str]]': # line 479 - ''' Converts given filename according to specified file patterns. No support for adjacent glob markers currently. 
''' # line 480 - pairs = [] # type: List[Tuple[str, str]] # line 481 - for filename in filenames: # line 482 - literals = [l for l in oldPattern if l.isLiteral] # type: List[GlobBlock] # source literals # line 483 - nextliteral = 0 # type: int # line 484 - index = 0 # type: int # line 484 - parsedOld = [] # type: List[GlobBlock2] # line 485 - for part in oldPattern: # match everything in the old filename # line 486 - if part.isLiteral: # line 487 - parsedOld.append(GlobBlock2(True, part.content, part.content)) # line 487 - index += len(part.content) # line 487 - nextliteral += 1 # line 487 - elif part.content.startswith("?"): # line 488 - parsedOld.append(GlobBlock2(False, part.content, filename[index:index + len(part.content)])) # line 488 + if len([o for o in ot if not o.isLiteral]) < len([n for n in nt if not n.isLiteral]): # line 476 + Exit("Source and target file patterns contain differing number of glob markers and can't be translated") # line 476 + if any((O.content != N.content for O, N in zip([o for o in ot if not o.isLiteral], [n for n in nt if not n.isLiteral]))): # line 477 + Exit("Source and target file patterns differ in semantics") # line 477 + return (ot, nt) # line 478 + +def convertGlobFiles(filenames: '_coconut.typing.Sequence[str]', oldPattern: '_coconut.typing.Sequence[GlobBlock]', newPattern: '_coconut.typing.Sequence[GlobBlock]') -> '_coconut.typing.Sequence[Tuple[str, str]]': # line 480 + ''' Converts given filename according to specified file patterns. No support for adjacent glob markers currently. 
''' # line 481 + pairs = [] # type: List[Tuple[str, str]] # line 482 + for filename in filenames: # line 483 + literals = [l for l in oldPattern if l.isLiteral] # type: List[GlobBlock] # source literals # line 484 + nextliteral = 0 # type: int # line 485 + index = 0 # type: int # line 485 + parsedOld = [] # type: List[GlobBlock2] # line 486 + for part in oldPattern: # match everything in the old filename # line 487 + if part.isLiteral: # line 488 + parsedOld.append(GlobBlock2(True, part.content, part.content)) # line 488 index += len(part.content) # line 488 - elif part.content.startswith("["): # line 489 - parsedOld.append(GlobBlock2(False, part.content, filename[index])) # line 489 - index += 1 # line 489 - elif part.content == "*": # line 490 - if nextliteral >= len(literals): # line 491 - parsedOld.append(GlobBlock2(False, part.content, filename[index:])) # line 491 - break # line 491 - nxt = filename.index(literals[nextliteral].content, index) # type: int # also matches empty string # line 492 - parsedOld.append(GlobBlock2(False, part.content, filename[index:nxt])) # line 493 - index = nxt # line 493 - else: # line 494 - Exit("Invalid file pattern specified for move/rename") # line 494 - globs = [g for g in parsedOld if not g.isLiteral] # type: List[GlobBlock2] # line 495 - literals = [l for l in newPattern if l.isLiteral] # target literals # line 496 - nextliteral = 0 # line 497 - nextglob = 0 # type: int # line 497 - outname = [] # type: List[str] # line 498 - for part in newPattern: # generate new filename # line 499 - if part.isLiteral: # line 500 - outname.append(literals[nextliteral].content) # line 500 - nextliteral += 1 # line 500 - else: # line 501 - outname.append(globs[nextglob].matches) # line 501 - nextglob += 1 # line 501 - pairs.append((filename, "".join(outname))) # line 502 - return pairs # line 503 - -@_coconut_tco # line 505 -def reorderRenameActions(actions: '_coconut.typing.Sequence[Tuple[str, str]]', exitOnConflict: 'bool'=True) -> 
'_coconut.typing.Sequence[Tuple[str, str]]': # line 505 + nextliteral += 1 # line 488 + elif part.content.startswith("?"): # line 489 + parsedOld.append(GlobBlock2(False, part.content, filename[index:index + len(part.content)])) # line 489 + index += len(part.content) # line 489 + elif part.content.startswith("["): # line 490 + parsedOld.append(GlobBlock2(False, part.content, filename[index])) # line 490 + index += 1 # line 490 + elif part.content == "*": # line 491 + if nextliteral >= len(literals): # line 492 + parsedOld.append(GlobBlock2(False, part.content, filename[index:])) # line 492 + break # line 492 + nxt = filename.index(literals[nextliteral].content, index) # type: int # also matches empty string # line 493 + parsedOld.append(GlobBlock2(False, part.content, filename[index:nxt])) # line 494 + index = nxt # line 494 + else: # line 495 + Exit("Invalid file pattern specified for move/rename") # line 495 + globs = [g for g in parsedOld if not g.isLiteral] # type: List[GlobBlock2] # line 496 + literals = [l for l in newPattern if l.isLiteral] # target literals # line 497 + nextliteral = 0 # line 498 + nextglob = 0 # type: int # line 498 + outname = [] # type: List[str] # line 499 + for part in newPattern: # generate new filename # line 500 + if part.isLiteral: # line 501 + outname.append(literals[nextliteral].content) # line 501 + nextliteral += 1 # line 501 + else: # line 502 + outname.append(globs[nextglob].matches) # line 502 + nextglob += 1 # line 502 + pairs.append((filename, "".join(outname))) # line 503 + return pairs # line 504 + +@_coconut_tco # line 506 +def reorderRenameActions(actions: '_coconut.typing.Sequence[Tuple[str, str]]', exitOnConflict: 'bool'=True) -> '_coconut.typing.Sequence[Tuple[str, str]]': # line 506 ''' Attempt to put all rename actions into an order that avoids target == source names. Note, that it's currently not really possible to specify patterns that make this work (swapping "*" elements with a reference). 
An alternative would be to always have one (or all) files renamed to a temporary name before renaming to target filename. - ''' # line 509 - if not actions: # line 510 - return [] # line 510 - sources = None # type: List[str] # line 511 - targets = None # type: List[str] # line 511 - sources, targets = [list(l) for l in zip(*actions)] # line 512 - last = len(actions) # type: int # line 513 - while last > 1: # line 514 - clean = True # type: bool # line 515 - for i in range(1, last): # line 516 - try: # line 517 - index = targets[:i].index(sources[i]) # type: int # line 518 - sources.insert(index, sources.pop(i)) # bubble up the action right before conflict # line 519 - targets.insert(index, targets.pop(i)) # line 520 - clean = False # line 521 - except: # target not found in sources: good! # line 522 - continue # target not found in sources: good! # line 522 - if clean: # line 523 - break # line 523 - last -= 1 # we know that the last entry in the list has the least conflicts, so we can disregard it in the next iteration # line 524 - if exitOnConflict: # line 525 - for i in range(1, len(actions)): # line 525 - if sources[i] in targets[:i]: # line 525 - Exit("There is no order of renaming actions that avoids copying over not-yet renamed files: '%s' is contained in matching source filenames" % (targets[i])) # line 525 - return _coconut_tail_call(list, zip(sources, targets)) # convert to list to avoid generators # line 526 - -def relativize(root: 'str', filepath: 'str') -> 'Tuple[str, str]': # line 528 - ''' Determine OS-independent relative folder path, and relative pattern path. Always expects a file and determines its folder's relative path. 
''' # line 529 - relpath = os.path.relpath(os.path.dirname(os.path.abspath(filepath)), root).replace(os.sep, SLASH) # line 530 - return relpath, os.path.join(relpath, os.path.basename(filepath)).replace(os.sep, SLASH) # line 531 - -def parseOnlyOptions(cwd: 'str', options: 'List[str]') -> 'Tuple[_coconut.typing.Optional[FrozenSet[str]], _coconut.typing.Optional[FrozenSet[str]]]': # line 533 - ''' Returns (root-normalized) set of --only arguments, and set or --except arguments. ''' # line 534 - root = os.getcwd() # type: str # line 535 - onlys = [] # type: List[str] # zero necessary as last start position # line 536 - excps = [] # type: List[str] # zero necessary as last start position # line 536 - index = 0 # type: int # zero necessary as last start position # line 536 - while True: # line 537 - try: # line 538 - index = 1 + listindex(options, "--only", index) # line 539 - onlys.append(options[index]) # line 540 - del options[index] # line 541 - del options[index - 1] # line 542 - except: # line 543 - break # line 543 - index = 0 # line 544 - while True: # line 545 - try: # line 546 - index = 1 + listindex(options, "--except", index) # line 547 - excps.append(options[index]) # line 548 - del options[index] # line 549 - del options[index - 1] # line 550 - except: # line 551 - break # line 551 - return (frozenset((oo for oo in (relativize(root, os.path.normpath(os.path.join(cwd, o)))[1] for o in onlys) if not oo.startswith(".." + SLASH))) if onlys else None, frozenset((ee for ee in (relativize(root, os.path.normpath(os.path.join(cwd, e)))[1] for e in excps) if not ee.startswith(".." 
+ SLASH))) if excps else None) # avoids out-of-repo paths # line 552 + ''' # line 510 + if not actions: # line 511 + return [] # line 511 + sources = None # type: List[str] # line 512 + targets = None # type: List[str] # line 512 + sources, targets = [list(l) for l in zip(*actions)] # line 513 + last = len(actions) # type: int # line 514 + while last > 1: # line 515 + clean = True # type: bool # line 516 + for i in range(1, last): # line 517 + try: # line 518 + index = targets[:i].index(sources[i]) # type: int # line 519 + sources.insert(index, sources.pop(i)) # bubble up the action right before conflict # line 520 + targets.insert(index, targets.pop(i)) # line 521 + clean = False # line 522 + except: # target not found in sources: good! # line 523 + continue # target not found in sources: good! # line 523 + if clean: # line 524 + break # line 524 + last -= 1 # we know that the last entry in the list has the least conflicts, so we can disregard it in the next iteration # line 525 + if exitOnConflict: # line 526 + for i in range(1, len(actions)): # line 526 + if sources[i] in targets[:i]: # line 526 + Exit("There is no order of renaming actions that avoids copying over not-yet renamed files: '%s' is contained in matching source filenames" % (targets[i])) # line 526 + return _coconut_tail_call(list, zip(sources, targets)) # convert to list to avoid generators # line 527 + +def relativize(root: 'str', filepath: 'str') -> 'Tuple[str, str]': # line 529 + ''' Determine OS-independent relative folder path, and relative pattern path. Always expects a file and determines its folder's relative path. 
''' # line 530 + relpath = os.path.relpath(os.path.dirname(os.path.abspath(filepath)), root).replace(os.sep, SLASH) # line 531 + return relpath, os.path.join(relpath, os.path.basename(filepath)).replace(os.sep, SLASH) # line 532 + +def parseOnlyOptions(cwd: 'str', options: 'List[str]') -> 'Tuple[_coconut.typing.Optional[FrozenSet[str]], _coconut.typing.Optional[FrozenSet[str]]]': # line 534 + ''' Returns (root-normalized) set of --only arguments, and set or --except arguments. ''' # line 535 + root = os.getcwd() # type: str # line 536 + onlys = [] # type: List[str] # zero necessary as last start position # line 537 + excps = [] # type: List[str] # zero necessary as last start position # line 537 + index = 0 # type: int # zero necessary as last start position # line 537 + while True: # line 538 + try: # line 539 + index = 1 + listindex(options, "--only", index) # line 540 + onlys.append(options[index]) # line 541 + del options[index] # line 542 + del options[index - 1] # line 543 + except: # line 544 + break # line 544 + index = 0 # line 545 + while True: # line 546 + try: # line 547 + index = 1 + listindex(options, "--except", index) # line 548 + excps.append(options[index]) # line 549 + del options[index] # line 550 + del options[index - 1] # line 551 + except: # line 552 + break # line 552 + return (frozenset((oo for oo in (relativize(root, os.path.normpath(os.path.join(cwd, o)))[1] for o in onlys) if not oo.startswith(".." + SLASH))) if onlys else None, frozenset((ee for ee in (relativize(root, os.path.normpath(os.path.join(cwd, e)))[1] for e in excps) if not ee.startswith(".." 
+ SLASH))) if excps else None) # avoids out-of-repo paths # line 553 diff --git a/sos/version.py b/sos/version.py old mode 100755 new mode 100644 index 79f82ac..17be2cd --- a/sos/version.py +++ b/sos/version.py @@ -1,3 +1,3 @@ -__version_info__ = (2018, 1425, 3206) -__version__ = r'2018.1425.3206-v1.5.0-47-gfc41f16' -__release_version__ = '1.5.3' \ No newline at end of file +__version_info__ = (2018, 1512, 2741) +__version__ = r'2018.1512.2741-v1.5.0-48-gbc08f48' +__release_version__ = '1.5.4' \ No newline at end of file