diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1,7657 +1,7663 @@ # commands.py - command processing for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os import re import sys from .i18n import _ from .node import ( hex, nullid, nullrev, short, wdirhex, wdirrev, ) from .pycompat import open from . import ( archival, bookmarks, bundle2, changegroup, cmdutil, copies, debugcommands as debugcommandsmod, destutil, dirstateguard, discovery, encoding, error, exchange, extensions, filemerge, formatter, graphmod, grep as grepmod, hbisect, help, hg, logcmdutil, merge as mergemod, mergestate as mergestatemod, narrowspec, obsolete, obsutil, patch, phases, pycompat, rcutil, registrar, requirements, revsetlang, rewriteutil, scmutil, server, shelve as shelvemod, state as statemod, streamclone, tags as tagsmod, ui as uimod, util, verify as verifymod, vfs as vfsmod, wireprotoserver, ) from .utils import ( dateutil, stringutil, ) table = {} table.update(debugcommandsmod.command._table) command = registrar.command(table) INTENT_READONLY = registrar.INTENT_READONLY # common command options globalopts = [ ( b'R', b'repository', b'', _(b'repository root directory or name of overlay bundle file'), _(b'REPO'), ), (b'', b'cwd', b'', _(b'change working directory'), _(b'DIR')), ( b'y', b'noninteractive', None, _( b'do not prompt, automatically pick the first choice for all prompts' ), ), (b'q', b'quiet', None, _(b'suppress output')), (b'v', b'verbose', None, _(b'enable additional output')), ( b'', b'color', b'', # i18n: 'always', 'auto', 'never', and 'debug' are keywords # and should not be translated _(b"when to colorize (boolean, always, auto, never, or debug)"), _(b'TYPE'), ), ( b'', b'config', [], _(b'set/override config 
option (use \'section.name=value\')'), _(b'CONFIG'), ), (b'', b'debug', None, _(b'enable debugging output')), (b'', b'debugger', None, _(b'start debugger')), ( b'', b'encoding', encoding.encoding, _(b'set the charset encoding'), _(b'ENCODE'), ), ( b'', b'encodingmode', encoding.encodingmode, _(b'set the charset encoding mode'), _(b'MODE'), ), (b'', b'traceback', None, _(b'always print a traceback on exception')), (b'', b'time', None, _(b'time how long the command takes')), (b'', b'profile', None, _(b'print command execution profile')), (b'', b'version', None, _(b'output version information and exit')), (b'h', b'help', None, _(b'display help and exit')), (b'', b'hidden', False, _(b'consider hidden changesets')), ( b'', b'pager', b'auto', _(b"when to paginate (boolean, always, auto, or never)"), _(b'TYPE'), ), ] dryrunopts = cmdutil.dryrunopts remoteopts = cmdutil.remoteopts walkopts = cmdutil.walkopts commitopts = cmdutil.commitopts commitopts2 = cmdutil.commitopts2 commitopts3 = cmdutil.commitopts3 formatteropts = cmdutil.formatteropts templateopts = cmdutil.templateopts logopts = cmdutil.logopts diffopts = cmdutil.diffopts diffwsopts = cmdutil.diffwsopts diffopts2 = cmdutil.diffopts2 mergetoolopts = cmdutil.mergetoolopts similarityopts = cmdutil.similarityopts subrepoopts = cmdutil.subrepoopts debugrevlogopts = cmdutil.debugrevlogopts # Commands start here, listed alphabetically @command( b'abort', dryrunopts, helpcategory=command.CATEGORY_CHANGE_MANAGEMENT, helpbasic=True, ) def abort(ui, repo, **opts): """abort an unfinished operation (EXPERIMENTAL) Aborts a multistep operation like graft, histedit, rebase, merge, and unshelve if they are in an unfinished state. use --dry-run/-n to dry run the command. 
""" dryrun = opts.get('dry_run') abortstate = cmdutil.getunfinishedstate(repo) if not abortstate: raise error.Abort(_(b'no operation in progress')) if not abortstate.abortfunc: raise error.Abort( ( _(b"%s in progress but does not support 'hg abort'") % (abortstate._opname) ), hint=abortstate.hint(), ) if dryrun: ui.status( _(b'%s in progress, will be aborted\n') % (abortstate._opname) ) return return abortstate.abortfunc(ui, repo) @command( b'add', walkopts + subrepoopts + dryrunopts, _(b'[OPTION]... [FILE]...'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, helpbasic=True, inferrepo=True, ) def add(ui, repo, *pats, **opts): """add the specified files on the next commit Schedule files to be version controlled and added to the repository. The files will be added to the repository at the next commit. To undo an add before that, see :hg:`forget`. If no names are given, add all files to the repository (except files matching ``.hgignore``). .. container:: verbose Examples: - New (unknown) files are added automatically by :hg:`add`:: $ ls foo.c $ hg status ? foo.c $ hg add adding foo.c $ hg status A foo.c - Specific files to be added can be specified:: $ ls bar.c foo.c $ hg status ? bar.c ? foo.c $ hg add bar.c $ hg status A bar.c ? foo.c Returns 0 if all files are successfully added. """ m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts)) uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) rejected = cmdutil.add(ui, repo, m, b"", uipathfn, False, **opts) return rejected and 1 or 0 @command( b'addremove', similarityopts + subrepoopts + walkopts + dryrunopts, _(b'[OPTION]... [FILE]...'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, inferrepo=True, ) def addremove(ui, repo, *pats, **opts): """add all new files, delete all missing files Add all new files and remove all missing files from the repository. Unless names are given, new files are ignored if they match any of the patterns in ``.hgignore``. 
As with add, these changes take effect at the next commit. Use the -s/--similarity option to detect renamed files. This option takes a percentage between 0 (disabled) and 100 (files must be identical) as its parameter. With a parameter greater than 0, this compares every removed file with every added file and records those similar enough as renames. Detecting renamed files this way can be expensive. After using this option, :hg:`status -C` can be used to check which files were identified as moved or renamed. If not specified, -s/--similarity defaults to 100 and only renames of identical files are detected. .. container:: verbose Examples: - A number of files (bar.c and foo.c) are new, while foobar.c has been removed (without using :hg:`remove`) from the repository:: $ ls bar.c foo.c $ hg status ! foobar.c ? bar.c ? foo.c $ hg addremove adding bar.c adding foo.c removing foobar.c $ hg status A bar.c A foo.c R foobar.c - A file foobar.c was moved to foo.c without using :hg:`rename`. Afterwards, it was edited slightly:: $ ls foo.c $ hg status ! foobar.c ? foo.c $ hg addremove --similarity 90 removing foobar.c adding foo.c recording removal of foobar.c as rename to foo.c (94% similar) $ hg status -C A foo.c foobar.c R foobar.c Returns 0 if all files are successfully added. 
""" opts = pycompat.byteskwargs(opts) if not opts.get(b'similarity'): opts[b'similarity'] = b'100' matcher = scmutil.match(repo[None], pats, opts) relative = scmutil.anypats(pats, opts) uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative) return scmutil.addremove(repo, matcher, b"", uipathfn, opts) @command( b'annotate|blame', [ (b'r', b'rev', b'', _(b'annotate the specified revision'), _(b'REV')), ( b'', b'follow', None, _(b'follow copies/renames and list the filename (DEPRECATED)'), ), (b'', b'no-follow', None, _(b"don't follow copies and renames")), (b'a', b'text', None, _(b'treat all files as text')), (b'u', b'user', None, _(b'list the author (long with -v)')), (b'f', b'file', None, _(b'list the filename')), (b'd', b'date', None, _(b'list the date (short with -q)')), (b'n', b'number', None, _(b'list the revision number (default)')), (b'c', b'changeset', None, _(b'list the changeset')), ( b'l', b'line-number', None, _(b'show line number at the first appearance'), ), ( b'', b'skip', [], _(b'revset to not display (EXPERIMENTAL)'), _(b'REV'), ), ] + diffwsopts + walkopts + formatteropts, _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'), helpcategory=command.CATEGORY_FILE_CONTENTS, helpbasic=True, inferrepo=True, ) def annotate(ui, repo, *pats, **opts): """show changeset information by line for each file List changes in files, showing the revision id responsible for each line. This command is useful for discovering when a change was made and by whom. If you include --file, --user, or --date, the revision number is suppressed unless you also include --number. Without the -a/--text option, annotate will avoid processing files it detects as binary. With -a, annotate will annotate the file anyway, although the results will probably be neither useful nor desirable. .. container:: verbose Template: The following keywords are supported in addition to the common template keywords and functions. See also :hg:`help templates`. 
:lines: List of lines with annotation data. :path: String. Repository-absolute path of the specified file. And each entry of ``{lines}`` provides the following sub-keywords in addition to ``{date}``, ``{node}``, ``{rev}``, ``{user}``, etc. :line: String. Line content. :lineno: Integer. Line number at that revision. :path: String. Repository-absolute path of the file at that revision. See :hg:`help templates.operators` for the list expansion syntax. Returns 0 on success. """ opts = pycompat.byteskwargs(opts) if not pats: raise error.Abort(_(b'at least one filename or pattern is required')) if opts.get(b'follow'): # --follow is deprecated and now just an alias for -f/--file # to mimic the behavior of Mercurial before version 1.5 opts[b'file'] = True if ( not opts.get(b'user') and not opts.get(b'changeset') and not opts.get(b'date') and not opts.get(b'file') ): opts[b'number'] = True linenumber = opts.get(b'line_number') is not None if ( linenumber and (not opts.get(b'changeset')) and (not opts.get(b'number')) ): raise error.Abort(_(b'at least one of -n/-c is required for -l')) rev = opts.get(b'rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') ctx = scmutil.revsingle(repo, rev) ui.pager(b'annotate') rootfm = ui.formatter(b'annotate', opts) if ui.debugflag: shorthex = pycompat.identity else: def shorthex(h): return h[:12] if ui.quiet: datefunc = dateutil.shortdate else: datefunc = dateutil.datestr if ctx.rev() is None: if opts.get(b'changeset'): # omit "+" suffix which is appended to node hex def formatrev(rev): if rev == wdirrev: return b'%d' % ctx.p1().rev() else: return b'%d' % rev else: def formatrev(rev): if rev == wdirrev: return b'%d+' % ctx.p1().rev() else: return b'%d ' % rev def formathex(h): if h == wdirhex: return b'%s+' % shorthex(hex(ctx.p1().node())) else: return b'%s ' % shorthex(h) else: formatrev = b'%d'.__mod__ formathex = shorthex opmap = [ (b'user', b' ', lambda x: x.fctx.user(), ui.shortuser), (b'rev', b' ', lambda x: 
scmutil.intrev(x.fctx), formatrev), (b'node', b' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex), (b'date', b' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)), (b'path', b' ', lambda x: x.fctx.path(), pycompat.bytestr), (b'lineno', b':', lambda x: x.lineno, pycompat.bytestr), ] opnamemap = { b'rev': b'number', b'node': b'changeset', b'path': b'file', b'lineno': b'line_number', } if rootfm.isplain(): def makefunc(get, fmt): return lambda x: fmt(get(x)) else: def makefunc(get, fmt): return get datahint = rootfm.datahint() funcmap = [ (makefunc(get, fmt), sep) for fn, sep, get, fmt in opmap if opts.get(opnamemap.get(fn, fn)) or fn in datahint ] funcmap[0] = (funcmap[0][0], b'') # no separator in front of first column fields = b' '.join( fn for fn, sep, get, fmt in opmap if opts.get(opnamemap.get(fn, fn)) or fn in datahint ) def bad(x, y): raise error.Abort(b"%s: %s" % (x, y)) m = scmutil.match(ctx, pats, opts, badfn=bad) follow = not opts.get(b'no_follow') diffopts = patch.difffeatureopts( ui, opts, section=b'annotate', whitespace=True ) skiprevs = opts.get(b'skip') if skiprevs: skiprevs = scmutil.revrange(repo, skiprevs) uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) for abs in ctx.walk(m): fctx = ctx[abs] rootfm.startitem() rootfm.data(path=abs) if not opts.get(b'text') and fctx.isbinary(): rootfm.plain(_(b"%s: binary file\n") % uipathfn(abs)) continue fm = rootfm.nested(b'lines', tmpl=b'{rev}: {line}') lines = fctx.annotate( follow=follow, skiprevs=skiprevs, diffopts=diffopts ) if not lines: fm.end() continue formats = [] pieces = [] for f, sep in funcmap: l = [f(n) for n in lines] if fm.isplain(): sizes = [encoding.colwidth(x) for x in l] ml = max(sizes) formats.append([sep + b' ' * (ml - w) + b'%s' for w in sizes]) else: formats.append([b'%s'] * len(l)) pieces.append(l) for f, p, n in zip(zip(*formats), zip(*pieces), lines): fm.startitem() fm.context(fctx=n.fctx) fm.write(fields, b"".join(f), *p) if n.skip: fmt = b"* %s" else: fmt = 
b": %s" fm.write(b'line', fmt, n.text) if not lines[-1].text.endswith(b'\n'): fm.plain(b'\n') fm.end() rootfm.end() @command( b'archive', [ (b'', b'no-decode', None, _(b'do not pass files through decoders')), ( b'p', b'prefix', b'', _(b'directory prefix for files in archive'), _(b'PREFIX'), ), (b'r', b'rev', b'', _(b'revision to distribute'), _(b'REV')), (b't', b'type', b'', _(b'type of distribution to create'), _(b'TYPE')), ] + subrepoopts + walkopts, _(b'[OPTION]... DEST'), helpcategory=command.CATEGORY_IMPORT_EXPORT, ) def archive(ui, repo, dest, **opts): '''create an unversioned archive of a repository revision By default, the revision used is the parent of the working directory; use -r/--rev to specify a different revision. The archive type is automatically detected based on file extension (to override, use -t/--type). .. container:: verbose Examples: - create a zip file containing the 1.0 release:: hg archive -r 1.0 project-1.0.zip - create a tarball excluding .hg files:: hg archive project.tar.gz -X ".hg*" Valid types are: :``files``: a directory full of files (default) :``tar``: tar archive, uncompressed :``tbz2``: tar archive, compressed using bzip2 :``tgz``: tar archive, compressed using gzip :``txz``: tar archive, compressed using lzma (only in Python 3) :``uzip``: zip archive, uncompressed :``zip``: zip archive, compressed using deflate The exact name of the destination archive or directory is given using a format string; see :hg:`help export` for details. Each member added to an archive file has a directory prefix prepended. Use -p/--prefix to specify a format string for the prefix. The default is the basename of the archive, with suffixes removed. Returns 0 on success. 
''' opts = pycompat.byteskwargs(opts) rev = opts.get(b'rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') ctx = scmutil.revsingle(repo, rev) if not ctx: raise error.Abort(_(b'no working directory: please specify a revision')) node = ctx.node() dest = cmdutil.makefilename(ctx, dest) if os.path.realpath(dest) == repo.root: raise error.Abort(_(b'repository root cannot be destination')) kind = opts.get(b'type') or archival.guesskind(dest) or b'files' prefix = opts.get(b'prefix') if dest == b'-': if kind == b'files': raise error.Abort(_(b'cannot archive plain files to stdout')) dest = cmdutil.makefileobj(ctx, dest) if not prefix: prefix = os.path.basename(repo.root) + b'-%h' prefix = cmdutil.makefilename(ctx, prefix) match = scmutil.match(ctx, [], opts) archival.archive( repo, dest, node, kind, not opts.get(b'no_decode'), match, prefix, subrepos=opts.get(b'subrepos'), ) @command( b'backout', [ ( b'', b'merge', None, _(b'merge with old dirstate parent after backout'), ), ( b'', b'commit', None, _(b'commit if no conflicts were encountered (DEPRECATED)'), ), (b'', b'no-commit', None, _(b'do not commit')), ( b'', b'parent', b'', _(b'parent to choose when backing out merge (DEPRECATED)'), _(b'REV'), ), (b'r', b'rev', b'', _(b'revision to backout'), _(b'REV')), (b'e', b'edit', False, _(b'invoke editor on commit messages')), ] + mergetoolopts + walkopts + commitopts + commitopts2, _(b'[OPTION]... [-r] REV'), helpcategory=command.CATEGORY_CHANGE_MANAGEMENT, ) def backout(ui, repo, node=None, rev=None, **opts): '''reverse effect of earlier changeset Prepare a new changeset with the effect of REV undone in the current working directory. If no conflicts were encountered, it will be committed immediately. If REV is the parent of the working directory, then this new changeset is committed automatically (unless --no-commit is specified). .. note:: :hg:`backout` cannot be used to fix either an unwanted or incorrect merge. .. 
container:: verbose Examples: - Reverse the effect of the parent of the working directory. This backout will be committed immediately:: hg backout -r . - Reverse the effect of previous bad revision 23:: hg backout -r 23 - Reverse the effect of previous bad revision 23 and leave changes uncommitted:: hg backout -r 23 --no-commit hg commit -m "Backout revision 23" By default, the pending changeset will have one parent, maintaining a linear history. With --merge, the pending changeset will instead have two parents: the old parent of the working directory and a new child of REV that simply undoes REV. Before version 1.7, the behavior without --merge was equivalent to specifying --merge followed by :hg:`update --clean .` to cancel the merge and leave the child of REV as a head to be merged separately. See :hg:`help dates` for a list of formats valid for -d/--date. See :hg:`help revert` for a way to restore files to the state of another revision. Returns 0 on success, 1 if nothing to backout or there are unresolved files. 
''' with repo.wlock(), repo.lock(): return _dobackout(ui, repo, node, rev, **opts) def _dobackout(ui, repo, node=None, rev=None, **opts): cmdutil.check_incompatible_arguments(opts, 'no_commit', ['commit', 'merge']) opts = pycompat.byteskwargs(opts) if rev and node: raise error.Abort(_(b"please specify just one revision")) if not rev: rev = node if not rev: raise error.Abort(_(b"please specify a revision to backout")) date = opts.get(b'date') if date: opts[b'date'] = dateutil.parsedate(date) cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) ctx = scmutil.revsingle(repo, rev) node = ctx.node() op1, op2 = repo.dirstate.parents() if not repo.changelog.isancestor(node, op1): raise error.Abort(_(b'cannot backout change that is not an ancestor')) p1, p2 = repo.changelog.parents(node) if p1 == nullid: raise error.Abort(_(b'cannot backout a change with no parents')) if p2 != nullid: if not opts.get(b'parent'): raise error.Abort(_(b'cannot backout a merge changeset')) p = repo.lookup(opts[b'parent']) if p not in (p1, p2): raise error.Abort( _(b'%s is not a parent of %s') % (short(p), short(node)) ) parent = p else: if opts.get(b'parent'): raise error.Abort(_(b'cannot use --parent on non-merge changeset')) parent = p1 # the backout should appear on the same branch branch = repo.dirstate.branch() bheads = repo.branchheads(branch) rctx = scmutil.revsingle(repo, hex(parent)) if not opts.get(b'merge') and op1 != node: with dirstateguard.dirstateguard(repo, b'backout'): overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')} with ui.configoverride(overrides, b'backout'): stats = mergemod.back_out(ctx, parent=repo[parent]) repo.setparents(op1, op2) hg._showstats(repo, stats) if stats.unresolvedcount: repo.ui.status( _(b"use 'hg resolve' to retry unresolved file merges\n") ) return 1 else: hg.clean(repo, node, show_stats=False) repo.dirstate.setbranch(branch) cmdutil.revert(ui, repo, rctx) if opts.get(b'no_commit'): msg = _(b"changeset %s backed out, don't forget to 
commit.\n") ui.status(msg % short(node)) return 0 def commitfunc(ui, repo, message, match, opts): editform = b'backout' e = cmdutil.getcommiteditor( editform=editform, **pycompat.strkwargs(opts) ) if not message: # we don't translate commit messages message = b"Backed out changeset %s" % short(node) e = cmdutil.getcommiteditor(edit=True, editform=editform) return repo.commit( message, opts.get(b'user'), opts.get(b'date'), match, editor=e ) newnode = cmdutil.commit(ui, repo, commitfunc, [], opts) if not newnode: ui.status(_(b"nothing changed\n")) return 1 cmdutil.commitstatus(repo, newnode, branch, bheads) def nice(node): return b'%d:%s' % (repo.changelog.rev(node), short(node)) ui.status( _(b'changeset %s backs out changeset %s\n') % (nice(repo.changelog.tip()), nice(node)) ) if opts.get(b'merge') and op1 != node: hg.clean(repo, op1, show_stats=False) ui.status( _(b'merging with changeset %s\n') % nice(repo.changelog.tip()) ) overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')} with ui.configoverride(overrides, b'backout'): return hg.merge(repo[b'tip']) return 0 @command( b'bisect', [ (b'r', b'reset', False, _(b'reset bisect state')), (b'g', b'good', False, _(b'mark changeset good')), (b'b', b'bad', False, _(b'mark changeset bad')), (b's', b'skip', False, _(b'skip testing changeset')), (b'e', b'extend', False, _(b'extend the bisect range')), ( b'c', b'command', b'', _(b'use command to check changeset state'), _(b'CMD'), ), (b'U', b'noupdate', False, _(b'do not update to target')), ], _(b"[-gbsr] [-U] [-c CMD] [REV]"), helpcategory=command.CATEGORY_CHANGE_NAVIGATION, ) def bisect( ui, repo, rev=None, extra=None, command=None, reset=None, good=None, bad=None, skip=None, extend=None, noupdate=None, ): """subdivision search of changesets This command helps to find changesets which introduce problems. To use, mark the earliest changeset you know exhibits the problem as bad, then mark the latest changeset which is free from the problem as good. 
Bisect will update your working directory to a revision for testing (unless the -U/--noupdate option is specified). Once you have performed tests, mark the working directory as good or bad, and bisect will either update to another candidate changeset or announce that it has found the bad revision. As a shortcut, you can also use the revision argument to mark a revision as good or bad without checking it out first. If you supply a command, it will be used for automatic bisection. The environment variable HG_NODE will contain the ID of the changeset being tested. The exit status of the command will be used to mark revisions as good or bad: status 0 means good, 125 means to skip the revision, 127 (command not found) will abort the bisection, and any other non-zero exit status means the revision is bad. .. container:: verbose Some examples: - start a bisection with known bad revision 34, and good revision 12:: hg bisect --bad 34 hg bisect --good 12 - advance the current bisection by marking current revision as good or bad:: hg bisect --good hg bisect --bad - mark the current revision, or a known revision, to be skipped (e.g. 
if that revision is not usable because of another issue):: hg bisect --skip hg bisect --skip 23 - skip all revisions that do not touch directories ``foo`` or ``bar``:: hg bisect --skip "!( file('path:foo') & file('path:bar') )" - forget the current bisection:: hg bisect --reset - use 'make && make tests' to automatically find the first broken revision:: hg bisect --reset hg bisect --bad 34 hg bisect --good 12 hg bisect --command "make && make tests" - see all changesets whose states are already known in the current bisection:: hg log -r "bisect(pruned)" - see the changeset currently being bisected (especially useful if running with -U/--noupdate):: hg log -r "bisect(current)" - see all changesets that took part in the current bisection:: hg log -r "bisect(range)" - you can even get a nice graph:: hg log --graph -r "bisect(range)" See :hg:`help revisions.bisect` for more about the `bisect()` predicate. Returns 0 on success. """ # backward compatibility if rev in b"good bad reset init".split(): ui.warn(_(b"(use of 'hg bisect ' is deprecated)\n")) cmd, rev, extra = rev, extra, None if cmd == b"good": good = True elif cmd == b"bad": bad = True else: reset = True elif extra: raise error.Abort(_(b'incompatible arguments')) incompatibles = { b'--bad': bad, b'--command': bool(command), b'--extend': extend, b'--good': good, b'--reset': reset, b'--skip': skip, } enabled = [x for x in incompatibles if incompatibles[x]] if len(enabled) > 1: raise error.Abort( _(b'%s and %s are incompatible') % tuple(sorted(enabled)[0:2]) ) if reset: hbisect.resetstate(repo) return state = hbisect.load_state(repo) # update state if good or bad or skip: if rev: nodes = [repo[i].node() for i in scmutil.revrange(repo, [rev])] else: nodes = [repo.lookup(b'.')] if good: state[b'good'] += nodes elif bad: state[b'bad'] += nodes elif skip: state[b'skip'] += nodes hbisect.save_state(repo, state) if not (state[b'good'] and state[b'bad']): return def mayupdate(repo, node, show_stats=True): """common used 
update sequence""" if noupdate: return cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) return hg.clean(repo, node, show_stats=show_stats) displayer = logcmdutil.changesetdisplayer(ui, repo, {}) if command: changesets = 1 if noupdate: try: node = state[b'current'][0] except LookupError: raise error.Abort( _( b'current bisect revision is unknown - ' b'start a new bisect to fix' ) ) else: node, p2 = repo.dirstate.parents() if p2 != nullid: raise error.Abort(_(b'current bisect revision is a merge')) if rev: node = repo[scmutil.revsingle(repo, rev, node)].node() with hbisect.restore_state(repo, state, node): while changesets: # update state state[b'current'] = [node] hbisect.save_state(repo, state) status = ui.system( command, environ={b'HG_NODE': hex(node)}, blockedtag=b'bisect_check', ) if status == 125: transition = b"skip" elif status == 0: transition = b"good" # status < 0 means process was killed elif status == 127: raise error.Abort(_(b"failed to execute %s") % command) elif status < 0: raise error.Abort(_(b"%s killed") % command) else: transition = b"bad" state[transition].append(node) ctx = repo[node] ui.status( _(b'changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition) ) hbisect.checkstate(state) # bisect nodes, changesets, bgood = hbisect.bisect(repo, state) # update to next check node = nodes[0] mayupdate(repo, node, show_stats=False) hbisect.printresult(ui, repo, state, displayer, nodes, bgood) return hbisect.checkstate(state) # actually bisect nodes, changesets, good = hbisect.bisect(repo, state) if extend: if not changesets: extendnode = hbisect.extendrange(repo, state, nodes, good) if extendnode is not None: ui.write( _(b"Extending search to changeset %d:%s\n") % (extendnode.rev(), extendnode) ) state[b'current'] = [extendnode.node()] hbisect.save_state(repo, state) return mayupdate(repo, extendnode.node()) raise error.Abort(_(b"nothing to extend")) if changesets == 0: hbisect.printresult(ui, repo, state, displayer, nodes, good) else: assert 
len(nodes) == 1 # only a single node can be tested next node = nodes[0] # compute the approximate number of remaining tests tests, size = 0, 2 while size <= changesets: tests, size = tests + 1, size * 2 rev = repo.changelog.rev(node) ui.write( _( b"Testing changeset %d:%s " b"(%d changesets remaining, ~%d tests)\n" ) % (rev, short(node), changesets, tests) ) state[b'current'] = [node] hbisect.save_state(repo, state) return mayupdate(repo, node) @command( b'bookmarks|bookmark', [ (b'f', b'force', False, _(b'force')), (b'r', b'rev', b'', _(b'revision for bookmark action'), _(b'REV')), (b'd', b'delete', False, _(b'delete a given bookmark')), (b'm', b'rename', b'', _(b'rename a given bookmark'), _(b'OLD')), (b'i', b'inactive', False, _(b'mark a bookmark inactive')), (b'l', b'list', False, _(b'list existing bookmarks')), ] + formatteropts, _(b'hg bookmarks [OPTIONS]... [NAME]...'), helpcategory=command.CATEGORY_CHANGE_ORGANIZATION, ) def bookmark(ui, repo, *names, **opts): '''create a new bookmark or list existing bookmarks Bookmarks are labels on changesets to help track lines of development. Bookmarks are unversioned and can be moved, renamed and deleted. Deleting or moving a bookmark has no effect on the associated changesets. Creating or updating to a bookmark causes it to be marked as 'active'. The active bookmark is indicated with a '*'. When a commit is made, the active bookmark will advance to the new commit. A plain :hg:`update` will also advance an active bookmark, if possible. Updating away from a bookmark will cause it to be deactivated. Bookmarks can be pushed and pulled between repositories (see :hg:`help push` and :hg:`help pull`). If a shared bookmark has diverged, a new 'divergent bookmark' of the form 'name@path' will be created. Using :hg:`merge` will resolve the divergence. Specifying bookmark as '.' to -m/-d/-l options is equivalent to specifying the active bookmark's name. 
A bookmark named '@' has the special property that :hg:`clone` will check it out by default if it exists. .. container:: verbose Template: The following keywords are supported in addition to the common template keywords and functions such as ``{bookmark}``. See also :hg:`help templates`. :active: Boolean. True if the bookmark is active. Examples: - create an active bookmark for a new line of development:: hg book new-feature - create an inactive bookmark as a place marker:: hg book -i reviewed - create an inactive bookmark on another changeset:: hg book -r .^ tested - rename bookmark turkey to dinner:: hg book -m turkey dinner - move the '@' bookmark from another branch:: hg book -f @ - print only the active bookmark name:: hg book -ql . ''' opts = pycompat.byteskwargs(opts) force = opts.get(b'force') rev = opts.get(b'rev') inactive = opts.get(b'inactive') # meaning add/rename to inactive bookmark action = cmdutil.check_at_most_one_arg(opts, b'delete', b'rename', b'list') if action: cmdutil.check_incompatible_arguments(opts, action, [b'rev']) elif names or rev: action = b'add' elif inactive: action = b'inactive' # meaning deactivate else: action = b'list' cmdutil.check_incompatible_arguments( opts, b'inactive', [b'delete', b'list'] ) if not names and action in {b'add', b'delete'}: raise error.Abort(_(b"bookmark name required")) if action in {b'add', b'delete', b'rename', b'inactive'}: with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr: if action == b'delete': names = pycompat.maplist(repo._bookmarks.expandname, names) bookmarks.delete(repo, tr, names) elif action == b'rename': if not names: raise error.Abort(_(b"new bookmark name required")) elif len(names) > 1: raise error.Abort(_(b"only one new bookmark name allowed")) oldname = repo._bookmarks.expandname(opts[b'rename']) bookmarks.rename(repo, tr, oldname, names[0], force, inactive) elif action == b'add': bookmarks.addbookmarks(repo, tr, names, rev, force, inactive) elif action == b'inactive': 
if len(repo._bookmarks) == 0: ui.status(_(b"no bookmarks set\n")) elif not repo._activebookmark: ui.status(_(b"no active bookmark\n")) else: bookmarks.deactivate(repo) elif action == b'list': names = pycompat.maplist(repo._bookmarks.expandname, names) with ui.formatter(b'bookmarks', opts) as fm: bookmarks.printbookmarks(ui, repo, fm, names) else: raise error.ProgrammingError(b'invalid action: %s' % action) @command( b'branch', [ ( b'f', b'force', None, _(b'set branch name even if it shadows an existing branch'), ), (b'C', b'clean', None, _(b'reset branch name to parent branch name')), ( b'r', b'rev', [], _(b'change branches of the given revs (EXPERIMENTAL)'), ), ], _(b'[-fC] [NAME]'), helpcategory=command.CATEGORY_CHANGE_ORGANIZATION, ) def branch(ui, repo, label=None, **opts): """set or show the current branch name .. note:: Branch names are permanent and global. Use :hg:`bookmark` to create a light-weight bookmark instead. See :hg:`help glossary` for more information about named branches and bookmarks. With no argument, show the current branch name. With one argument, set the working directory branch name (the branch will not exist in the repository until the next commit). Standard practice recommends that primary development take place on the 'default' branch. Unless -f/--force is specified, branch will not let you set a branch name that already exists. Use -C/--clean to reset the working directory branch to that of the parent of the working directory, negating a previous branch change. Use the command :hg:`update` to switch to an existing branch. Use :hg:`commit --close-branch` to mark this branch head as closed. When all heads of a branch are closed, the branch will be considered closed. Returns 0 on success. 
""" opts = pycompat.byteskwargs(opts) revs = opts.get(b'rev') if label: label = label.strip() if not opts.get(b'clean') and not label: if revs: raise error.Abort(_(b"no branch name specified for the revisions")) ui.write(b"%s\n" % repo.dirstate.branch()) return with repo.wlock(): if opts.get(b'clean'): label = repo[b'.'].branch() repo.dirstate.setbranch(label) ui.status(_(b'reset working directory to branch %s\n') % label) elif label: scmutil.checknewlabel(repo, label, b'branch') if revs: return cmdutil.changebranch(ui, repo, revs, label, opts) if not opts.get(b'force') and label in repo.branchmap(): if label not in [p.branch() for p in repo[None].parents()]: raise error.Abort( _(b'a branch of the same name already exists'), # i18n: "it" refers to an existing branch hint=_(b"use 'hg update' to switch to it"), ) repo.dirstate.setbranch(label) ui.status(_(b'marked working directory as branch %s\n') % label) # find any open named branches aside from default for n, h, t, c in repo.branchmap().iterbranches(): if n != b"default" and not c: return 0 ui.status( _( b'(branches are permanent and global, ' b'did you want a bookmark?)\n' ) ) @command( b'branches', [ ( b'a', b'active', False, _(b'show only branches that have unmerged heads (DEPRECATED)'), ), (b'c', b'closed', False, _(b'show normal and closed branches')), (b'r', b'rev', [], _(b'show branch name(s) of the given rev')), ] + formatteropts, _(b'[-c]'), helpcategory=command.CATEGORY_CHANGE_ORGANIZATION, intents={INTENT_READONLY}, ) def branches(ui, repo, active=False, closed=False, **opts): """list repository named branches List the repository's named branches, indicating which ones are inactive. If -c/--closed is specified, also list branches which have been marked closed (see :hg:`commit --close-branch`). Use the command :hg:`update` to switch to an existing branch. .. container:: verbose Template: The following keywords are supported in addition to the common template keywords and functions such as ``{branch}``. 
See also :hg:`help templates`. :active: Boolean. True if the branch is active. :closed: Boolean. True if the branch is closed. :current: Boolean. True if it is the current branch. Returns 0. """ opts = pycompat.byteskwargs(opts) revs = opts.get(b'rev') selectedbranches = None if revs: revs = scmutil.revrange(repo, revs) getbi = repo.revbranchcache().branchinfo selectedbranches = {getbi(r)[0] for r in revs} ui.pager(b'branches') fm = ui.formatter(b'branches', opts) hexfunc = fm.hexfunc allheads = set(repo.heads()) branches = [] for tag, heads, tip, isclosed in repo.branchmap().iterbranches(): if selectedbranches is not None and tag not in selectedbranches: continue isactive = False if not isclosed: openheads = set(repo.branchmap().iteropen(heads)) isactive = bool(openheads & allheads) branches.append((tag, repo[tip], isactive, not isclosed)) branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]), reverse=True) for tag, ctx, isactive, isopen in branches: if active and not isactive: continue if isactive: label = b'branches.active' notice = b'' elif not isopen: if not closed: continue label = b'branches.closed' notice = _(b' (closed)') else: label = b'branches.inactive' notice = _(b' (inactive)') current = tag == repo.dirstate.branch() if current: label = b'branches.current' fm.startitem() fm.write(b'branch', b'%s', tag, label=label) rev = ctx.rev() padsize = max(31 - len(b"%d" % rev) - encoding.colwidth(tag), 0) fmt = b' ' * padsize + b' %d:%s' fm.condwrite( not ui.quiet, b'rev node', fmt, rev, hexfunc(ctx.node()), label=b'log.changeset changeset.%s' % ctx.phasestr(), ) fm.context(ctx=ctx) fm.data(active=isactive, closed=not isopen, current=current) if not ui.quiet: fm.plain(notice) fm.plain(b'\n') fm.end() @command( b'bundle', [ ( b'f', b'force', None, _(b'run even when the destination is unrelated'), ), ( b'r', b'rev', [], _(b'a changeset intended to be added to the destination'), _(b'REV'), ), ( b'b', b'branch', [], _(b'a specific branch you would like to 
bundle'), _(b'BRANCH'), ), ( b'', b'base', [], _(b'a base changeset assumed to be available at the destination'), _(b'REV'), ), (b'a', b'all', None, _(b'bundle all changesets in the repository')), ( b't', b'type', b'bzip2', _(b'bundle compression type to use'), _(b'TYPE'), ), ] + remoteopts, _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'), helpcategory=command.CATEGORY_IMPORT_EXPORT, ) def bundle(ui, repo, fname, dest=None, **opts): """create a bundle file Generate a bundle file containing data to be transferred to another repository. To create a bundle containing all changesets, use -a/--all (or --base null). Otherwise, hg assumes the destination will have all the nodes you specify with --base parameters. Otherwise, hg will assume the repository has all the nodes in destination, or default-push/default if no destination is specified, where destination is the repository you provide through DEST option. You can change bundle format with the -t/--type option. See :hg:`help bundlespec` for documentation on this format. By default, the most appropriate format is used and compression defaults to bzip2. The bundle file can then be transferred using conventional means and applied to another repository with the unbundle or pull command. This is useful when direct push and pull are not available or when exporting an entire repository is undesirable. Applying bundles preserves all changeset contents including permissions, copy/rename information, and revision history. Returns 0 on success, 1 if no changes found. 
""" opts = pycompat.byteskwargs(opts) revs = None if b'rev' in opts: revstrings = opts[b'rev'] revs = scmutil.revrange(repo, revstrings) if revstrings and not revs: raise error.Abort(_(b'no commits to bundle')) bundletype = opts.get(b'type', b'bzip2').lower() try: bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False) except error.UnsupportedBundleSpecification as e: raise error.Abort( pycompat.bytestr(e), hint=_(b"see 'hg help bundlespec' for supported values for --type"), ) cgversion = bundlespec.contentopts[b"cg.version"] # Packed bundles are a pseudo bundle format for now. if cgversion == b's1': raise error.Abort( _(b'packed bundles cannot be produced by "hg bundle"'), hint=_(b"use 'hg debugcreatestreamclonebundle'"), ) if opts.get(b'all'): if dest: raise error.Abort( _(b"--all is incompatible with specifying a destination") ) if opts.get(b'base'): ui.warn(_(b"ignoring --base because --all was specified\n")) base = [nullrev] else: base = scmutil.revrange(repo, opts.get(b'base')) if cgversion not in changegroup.supportedoutgoingversions(repo): raise error.Abort( _(b"repository does not support bundle version %s") % cgversion ) if base: if dest: raise error.Abort( _(b"--base is incompatible with specifying a destination") ) common = [repo[rev].node() for rev in base] heads = [repo[r].node() for r in revs] if revs else None outgoing = discovery.outgoing(repo, common, heads) else: dest = ui.expandpath(dest or b'default-push', dest or b'default') dest, branches = hg.parseurl(dest, opts.get(b'branch')) other = hg.peer(repo, opts, dest) revs = [repo[r].hex() for r in revs] revs, checkout = hg.addbranchrevs(repo, repo, branches, revs) heads = revs and pycompat.maplist(repo.lookup, revs) or revs outgoing = discovery.findcommonoutgoing( repo, other, onlyheads=heads, force=opts.get(b'force'), portable=True, ) if not outgoing.missing: scmutil.nochangesfound(ui, repo, not base and outgoing.excluded) return 1 if cgversion == b'01': # bundle1 bversion = b'HG10' 
+ bundlespec.wirecompression bcompression = None elif cgversion in (b'02', b'03'): bversion = b'HG20' bcompression = bundlespec.wirecompression else: raise error.ProgrammingError( b'bundle: unexpected changegroup version %s' % cgversion ) # TODO compression options should be derived from bundlespec parsing. # This is a temporary hack to allow adjusting bundle compression # level without a) formalizing the bundlespec changes to declare it # b) introducing a command flag. compopts = {} complevel = ui.configint( b'experimental', b'bundlecomplevel.' + bundlespec.compression ) if complevel is None: complevel = ui.configint(b'experimental', b'bundlecomplevel') if complevel is not None: compopts[b'level'] = complevel # Allow overriding the bundling of obsmarker in phases through # configuration while we don't have a bundle version that include them if repo.ui.configbool(b'experimental', b'evolution.bundle-obsmarker'): bundlespec.contentopts[b'obsolescence'] = True if repo.ui.configbool(b'experimental', b'bundle-phases'): bundlespec.contentopts[b'phases'] = True bundle2.writenewbundle( ui, repo, b'bundle', fname, bversion, outgoing, bundlespec.contentopts, compression=bcompression, compopts=compopts, ) @command( b'cat', [ ( b'o', b'output', b'', _(b'print output to file with formatted name'), _(b'FORMAT'), ), (b'r', b'rev', b'', _(b'print the given revision'), _(b'REV')), (b'', b'decode', None, _(b'apply any matching decode filter')), ] + walkopts + formatteropts, _(b'[OPTION]... FILE...'), helpcategory=command.CATEGORY_FILE_CONTENTS, inferrepo=True, intents={INTENT_READONLY}, ) def cat(ui, repo, file1, *pats, **opts): """output the current or given revision of files Print the specified files as they were at the given revision. If no revision is given, the parent of the working directory is used. Output may be to a file, in which case the name of the file is given using a template string. See :hg:`help templates`. 
In addition to the common template keywords, the following formatting rules are supported: :``%%``: literal "%" character :``%s``: basename of file being printed :``%d``: dirname of file being printed, or '.' if in repository root :``%p``: root-relative path name of file being printed :``%H``: changeset hash (40 hexadecimal digits) :``%R``: changeset revision number :``%h``: short-form changeset hash (12 hexadecimal digits) :``%r``: zero-padded changeset revision number :``%b``: basename of the exporting repository :``\\``: literal "\\" character .. container:: verbose Template: The following keywords are supported in addition to the common template keywords and functions. See also :hg:`help templates`. :data: String. File content. :path: String. Repository-absolute path of the file. Returns 0 on success. """ opts = pycompat.byteskwargs(opts) rev = opts.get(b'rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') ctx = scmutil.revsingle(repo, rev) m = scmutil.match(ctx, (file1,) + pats, opts) fntemplate = opts.pop(b'output', b'') if cmdutil.isstdiofilename(fntemplate): fntemplate = b'' if fntemplate: fm = formatter.nullformatter(ui, b'cat', opts) else: ui.pager(b'cat') fm = ui.formatter(b'cat', opts) with fm: return cmdutil.cat( ui, repo, ctx, m, fm, fntemplate, b'', **pycompat.strkwargs(opts) ) @command( b'clone', [ ( b'U', b'noupdate', None, _( b'the clone will include an empty working ' b'directory (only a repository)' ), ), ( b'u', b'updaterev', b'', _(b'revision, tag, or branch to check out'), _(b'REV'), ), ( b'r', b'rev', [], _( b'do not clone everything, but include this changeset' b' and its ancestors' ), _(b'REV'), ), ( b'b', b'branch', [], _( b'do not clone everything, but include this branch\'s' b' changesets and their ancestors' ), _(b'BRANCH'), ), (b'', b'pull', None, _(b'use pull protocol to copy metadata')), (b'', b'uncompressed', None, _(b'an alias to --stream (DEPRECATED)')), (b'', b'stream', None, _(b'clone with minimal data 
processing')), ] + remoteopts, _(b'[OPTION]... SOURCE [DEST]'), helpcategory=command.CATEGORY_REPO_CREATION, helpbasic=True, norepo=True, ) def clone(ui, source, dest=None, **opts): """make a copy of an existing repository Create a copy of an existing repository in a new directory. If no destination directory name is specified, it defaults to the basename of the source. The location of the source is added to the new repository's ``.hg/hgrc`` file, as the default to be used for future pulls. Only local paths and ``ssh://`` URLs are supported as destinations. For ``ssh://`` destinations, no working directory or ``.hg/hgrc`` will be created on the remote side. If the source repository has a bookmark called '@' set, that revision will be checked out in the new repository by default. To check out a particular version, use -u/--update, or -U/--noupdate to create a clone with no working directory. To pull only a subset of changesets, specify one or more revisions identifiers with -r/--rev or branches with -b/--branch. The resulting clone will contain only the specified changesets and their ancestors. These options (or 'clone src#rev dest') imply --pull, even for local source repositories. In normal clone mode, the remote normalizes repository data into a common exchange format and the receiving end translates this data into its local storage format. --stream activates a different clone mode that essentially copies repository files from the remote with minimal data processing. This significantly reduces the CPU cost of a clone both remotely and locally. However, it often increases the transferred data size by 30-40%. This can result in substantially faster clones where I/O throughput is plentiful, especially for larger repositories. 
A side-effect of --stream clones is that storage settings and requirements on the remote are applied locally: a modern client may inherit legacy or inefficient storage used by the remote or a legacy Mercurial client may not be able to clone from a modern Mercurial remote. .. note:: Specifying a tag will include the tagged changeset but not the changeset containing the tag. .. container:: verbose For efficiency, hardlinks are used for cloning whenever the source and destination are on the same filesystem (note this applies only to the repository data, not to the working directory). Some filesystems, such as AFS, implement hardlinking incorrectly, but do not report errors. In these cases, use the --pull option to avoid hardlinking. Mercurial will update the working directory to the first applicable revision from this list: a) null if -U or the source repository has no changesets b) if -u . and the source repository is local, the first parent of the source repository's working directory c) the changeset specified with -u (if a branch name, this means the latest head of that branch) d) the changeset specified with -r e) the tipmost head specified with -b f) the tipmost head specified with the url#branch source syntax g) the revision marked with the '@' bookmark, if present h) the tipmost head of the default branch i) tip When cloning from servers that support it, Mercurial may fetch pre-generated data from a server-advertised URL or inline from the same stream. When this is done, hooks operating on incoming changesets and changegroups may fire more than once, once for each pre-generated bundle and as well as for any additional remaining data. In addition, if an error occurs, the repository may be rolled back to a partial clone. This behavior may change in future releases. See :hg:`help -e clonebundles` for more. 
Examples: - clone a remote repository to a new directory named hg/:: hg clone https://www.mercurial-scm.org/repo/hg/ - create a lightweight local clone:: hg clone project/ project-feature/ - clone from an absolute path on an ssh server (note double-slash):: hg clone ssh://user@server//home/projects/alpha/ - do a streaming clone while checking out a specified version:: hg clone --stream http://server/repo -u 1.5 - create a repository without changesets after a particular revision:: hg clone -r 04e544 experimental/ good/ - clone (and track) a particular named branch:: hg clone https://www.mercurial-scm.org/repo/hg/#stable See :hg:`help urls` for details on specifying URLs. Returns 0 on success. """ opts = pycompat.byteskwargs(opts) cmdutil.check_at_most_one_arg(opts, b'noupdate', b'updaterev') # --include/--exclude can come from narrow or sparse. includepats, excludepats = None, None # hg.clone() differentiates between None and an empty set. So make sure # patterns are sets if narrow is requested without patterns. 
if opts.get(b'narrow'): includepats = set() excludepats = set() if opts.get(b'include'): includepats = narrowspec.parsepatterns(opts.get(b'include')) if opts.get(b'exclude'): excludepats = narrowspec.parsepatterns(opts.get(b'exclude')) r = hg.clone( ui, opts, source, dest, pull=opts.get(b'pull'), stream=opts.get(b'stream') or opts.get(b'uncompressed'), revs=opts.get(b'rev'), update=opts.get(b'updaterev') or not opts.get(b'noupdate'), branch=opts.get(b'branch'), shareopts=opts.get(b'shareopts'), storeincludepats=includepats, storeexcludepats=excludepats, depth=opts.get(b'depth') or None, ) return r is None @command( b'commit|ci', [ ( b'A', b'addremove', None, _(b'mark new/missing files as added/removed before committing'), ), (b'', b'close-branch', None, _(b'mark a branch head as closed')), (b'', b'amend', None, _(b'amend the parent of the working directory')), (b's', b'secret', None, _(b'use the secret phase for committing')), (b'e', b'edit', None, _(b'invoke editor on commit messages')), ( b'', b'force-close-branch', None, _(b'forcibly close branch from a non-head changeset (ADVANCED)'), ), (b'i', b'interactive', None, _(b'use interactive mode')), ] + walkopts + commitopts + commitopts2 + subrepoopts, _(b'[OPTION]... [FILE]...'), helpcategory=command.CATEGORY_COMMITTING, helpbasic=True, inferrepo=True, ) def commit(ui, repo, *pats, **opts): """commit the specified files or all outstanding changes Commit changes to the given files into the repository. Unlike a centralized SCM, this operation is a local operation. See :hg:`push` for a way to actively distribute your changes. If a list of files is omitted, all changes reported by :hg:`status` will be committed. If you are committing the result of a merge, do not provide any filenames or -I/-X filters. If no commit message is specified, Mercurial starts your configured editor where you can enter a message. In case your commit fails, you will find a backup of your message in ``.hg/last-message.txt``. 
The --close-branch flag can be used to mark the current branch head closed. When all heads of a branch are closed, the branch will be considered closed and no longer listed. The --amend flag can be used to amend the parent of the working directory with a new commit that contains the changes in the parent in addition to those currently reported by :hg:`status`, if there are any. The old commit is stored in a backup bundle in ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle` on how to restore it). Message, user and date are taken from the amended commit unless specified. When a message isn't specified on the command line, the editor will open with the message of the amended commit. It is not possible to amend public changesets (see :hg:`help phases`) or changesets that have children. See :hg:`help dates` for a list of formats valid for -d/--date. Returns 0 on success, 1 if nothing changed. .. container:: verbose Examples: - commit all files ending in .py:: hg commit --include "set:**.py" - commit all non-binary files:: hg commit --exclude "set:binary()" - amend the current commit and set the date to now:: hg commit --amend --date now """ with repo.wlock(), repo.lock(): return _docommit(ui, repo, *pats, **opts) def _docommit(ui, repo, *pats, **opts): if opts.get('interactive'): opts.pop('interactive') ret = cmdutil.dorecord( ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts ) # ret can be 0 (no changes to record) or the value returned by # commit(), 1 if nothing changed or None on success. return 1 if ret == 0 else ret opts = pycompat.byteskwargs(opts) if opts.get(b'subrepos'): cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'amend']) # Let --subrepos on the command line override config setting. 
ui.setconfig(b'ui', b'commitsubrepos', True, b'commit') cmdutil.checkunfinished(repo, commit=True) branch = repo[None].branch() bheads = repo.branchheads(branch) extra = {} if opts.get(b'close_branch') or opts.get(b'force_close_branch'): extra[b'close'] = b'1' if repo[b'.'].closesbranch(): raise error.Abort( _(b'current revision is already a branch closing head') ) elif not bheads: raise error.Abort(_(b'branch "%s" has no heads to close') % branch) elif ( branch == repo[b'.'].branch() and repo[b'.'].node() not in bheads and not opts.get(b'force_close_branch') ): hint = _( b'use --force-close-branch to close branch from a non-head' b' changeset' ) raise error.Abort(_(b'can only close branch heads'), hint=hint) elif opts.get(b'amend'): if ( repo[b'.'].p1().branch() != branch and repo[b'.'].p2().branch() != branch ): raise error.Abort(_(b'can only close branch heads')) if opts.get(b'amend'): if ui.configbool(b'ui', b'commitsubrepos'): raise error.Abort(_(b'cannot amend with ui.commitsubrepos enabled')) old = repo[b'.'] rewriteutil.precheck(repo, [old.rev()], b'amend') # Currently histedit gets confused if an amend happens while histedit # is in progress. Since we have a checkunfinished command, we are # temporarily honoring it. # # Note: eventually this guard will be removed. Please do not expect # this behavior to remain. 
if not obsolete.isenabled(repo, obsolete.createmarkersopt): cmdutil.checkunfinished(repo) node = cmdutil.amend(ui, repo, old, extra, pats, opts) if node == old.node(): ui.status(_(b"nothing changed\n")) return 1 else: def commitfunc(ui, repo, message, match, opts): overrides = {} if opts.get(b'secret'): overrides[(b'phases', b'new-commit')] = b'secret' baseui = repo.baseui with baseui.configoverride(overrides, b'commit'): with ui.configoverride(overrides, b'commit'): editform = cmdutil.mergeeditform( repo[None], b'commit.normal' ) editor = cmdutil.getcommiteditor( editform=editform, **pycompat.strkwargs(opts) ) return repo.commit( message, opts.get(b'user'), opts.get(b'date'), match, editor=editor, extra=extra, ) node = cmdutil.commit(ui, repo, commitfunc, pats, opts) if not node: stat = cmdutil.postcommitstatus(repo, pats, opts) if stat.deleted: ui.status( _( b"nothing changed (%d missing files, see " b"'hg status')\n" ) % len(stat.deleted) ) else: ui.status(_(b"nothing changed\n")) return 1 cmdutil.commitstatus(repo, node, branch, bheads, opts) if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'): status( ui, repo, modified=True, added=True, removed=True, deleted=True, unknown=True, subrepos=opts.get(b'subrepos'), ) @command( b'config|showconfig|debugconfig', [ (b'u', b'untrusted', None, _(b'show untrusted configuration options')), (b'e', b'edit', None, _(b'edit user config')), (b'l', b'local', None, _(b'edit repository config')), ( b'', b'shared', None, _(b'edit shared source repository config (EXPERIMENTAL)'), ), + (b'', b'non-shared', None, _(b'edit non shared config (EXPERIMENTAL)')), (b'g', b'global', None, _(b'edit global config')), ] + formatteropts, _(b'[-u] [NAME]...'), helpcategory=command.CATEGORY_HELP, optionalrepo=True, intents={INTENT_READONLY}, ) def config(ui, repo, *values, **opts): """show combined config settings from all hgrc files With no arguments, print names and values of all config items. 
With one argument of the form section.name, print just the value of that config item. With multiple arguments, print names and values of all config items with matching section names or section.names. With --edit, start an editor on the user-level config file. With --global, edit the system-wide config file. With --local, edit the repository-level config file. With --debug, the source (filename and line number) is printed for each config item. See :hg:`help config` for more information about config files. .. container:: verbose + --non-shared flag is used to edit `.hg/hgrc-not-shared` config file. + This file is not shared across shares when in share-safe mode. + Template: The following keywords are supported. See also :hg:`help templates`. :name: String. Config name. :source: String. Filename and line number where the item is defined. :value: String. Config value. The --shared flag can be used to edit the config file of shared source repository. It only works when you have shared using the experimental share safe feature. Returns 0 on success, 1 if NAME does not exist. 
""" opts = pycompat.byteskwargs(opts) - editopts = (b'edit', b'local', b'global', b'shared') + editopts = (b'edit', b'local', b'global', b'shared', b'non_shared') if any(opts.get(o) for o in editopts): cmdutil.check_at_most_one_arg(opts, *editopts[1:]) if opts.get(b'local'): if not repo: raise error.Abort(_(b"can't use --local outside a repository")) paths = [repo.vfs.join(b'hgrc')] elif opts.get(b'global'): paths = rcutil.systemrcpath() elif opts.get(b'shared'): if not repo.shared(): raise error.Abort( _(b"repository is not shared; can't use --shared") ) if requirements.SHARESAFE_REQUIREMENT not in repo.requirements: raise error.Abort( _( b"share safe feature not unabled; " b"unable to edit shared source repository config" ) ) paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')] + elif opts.get(b'non_shared'): + paths = [repo.vfs.join(b'hgrc-not-shared')] else: paths = rcutil.userrcpath() for f in paths: if os.path.exists(f): break else: if opts.get(b'global'): samplehgrc = uimod.samplehgrcs[b'global'] elif opts.get(b'local'): samplehgrc = uimod.samplehgrcs[b'local'] else: samplehgrc = uimod.samplehgrcs[b'user'] f = paths[0] fp = open(f, b"wb") fp.write(util.tonativeeol(samplehgrc)) fp.close() editor = ui.geteditor() ui.system( b"%s \"%s\"" % (editor, f), onerr=error.Abort, errprefix=_(b"edit failed"), blockedtag=b'config_edit', ) return ui.pager(b'config') fm = ui.formatter(b'config', opts) for t, f in rcutil.rccomponents(): if t == b'path': ui.debug(b'read config from: %s\n' % f) elif t == b'resource': ui.debug(b'read config from: resource:%s.%s\n' % (f[0], f[1])) elif t == b'items': # Don't print anything for 'items'. pass else: raise error.ProgrammingError(b'unknown rctype: %s' % t) untrusted = bool(opts.get(b'untrusted')) selsections = selentries = [] if values: selsections = [v for v in values if b'.' not in v] selentries = [v for v in values if b'.' 
in v] uniquesel = len(selentries) == 1 and not selsections selsections = set(selsections) selentries = set(selentries) matched = False for section, name, value in ui.walkconfig(untrusted=untrusted): source = ui.configsource(section, name, untrusted) value = pycompat.bytestr(value) defaultvalue = ui.configdefault(section, name) if fm.isplain(): source = source or b'none' value = value.replace(b'\n', b'\\n') entryname = section + b'.' + name if values and not (section in selsections or entryname in selentries): continue fm.startitem() fm.condwrite(ui.debugflag, b'source', b'%s: ', source) if uniquesel: fm.data(name=entryname) fm.write(b'value', b'%s\n', value) else: fm.write(b'name value', b'%s=%s\n', entryname, value) if formatter.isprintable(defaultvalue): fm.data(defaultvalue=defaultvalue) elif isinstance(defaultvalue, list) and all( formatter.isprintable(e) for e in defaultvalue ): fm.data(defaultvalue=fm.formatlist(defaultvalue, name=b'value')) # TODO: no idea how to process unsupported defaultvalue types matched = True fm.end() if matched: return 0 return 1 @command( b'continue', dryrunopts, helpcategory=command.CATEGORY_CHANGE_MANAGEMENT, helpbasic=True, ) def continuecmd(ui, repo, **opts): """resumes an interrupted operation (EXPERIMENTAL) Finishes a multistep operation like graft, histedit, rebase, merge, and unshelve if they are in an interrupted state. use --dry-run/-n to dry run the command. 
""" dryrun = opts.get('dry_run') contstate = cmdutil.getunfinishedstate(repo) if not contstate: raise error.Abort(_(b'no operation in progress')) if not contstate.continuefunc: raise error.Abort( ( _(b"%s in progress but does not support 'hg continue'") % (contstate._opname) ), hint=contstate.continuemsg(), ) if dryrun: ui.status(_(b'%s in progress, will be resumed\n') % (contstate._opname)) return return contstate.continuefunc(ui, repo) @command( b'copy|cp', [ (b'', b'forget', None, _(b'unmark a destination file as copied')), (b'A', b'after', None, _(b'record a copy that has already occurred')), ( b'', b'at-rev', b'', _(b'(un)mark copies in the given revision (EXPERIMENTAL)'), _(b'REV'), ), ( b'f', b'force', None, _(b'forcibly copy over an existing managed file'), ), ] + walkopts + dryrunopts, _(b'[OPTION]... SOURCE... DEST'), helpcategory=command.CATEGORY_FILE_CONTENTS, ) def copy(ui, repo, *pats, **opts): """mark files as copied for the next commit Mark dest as having copies of source files. If dest is a directory, copies are put in that directory. If dest is a file, the source must be a single file. By default, this command copies the contents of files as they exist in the working directory. If invoked with -A/--after, the operation is recorded, but no copying is performed. To undo marking a destination file as copied, use --forget. With that option, all given (positional) arguments are unmarked as copies. The destination file(s) will be left in place (still tracked). This command takes effect with the next commit by default. Returns 0 on success, 1 if errors are encountered. 
""" opts = pycompat.byteskwargs(opts) with repo.wlock(): return cmdutil.copy(ui, repo, pats, opts) @command( b'debugcommands', [], _(b'[COMMAND]'), helpcategory=command.CATEGORY_HELP, norepo=True, ) def debugcommands(ui, cmd=b'', *args): """list all available commands and options""" for cmd, vals in sorted(pycompat.iteritems(table)): cmd = cmd.split(b'|')[0] opts = b', '.join([i[1] for i in vals[1]]) ui.write(b'%s: %s\n' % (cmd, opts)) @command( b'debugcomplete', [(b'o', b'options', None, _(b'show the command options'))], _(b'[-o] CMD'), helpcategory=command.CATEGORY_HELP, norepo=True, ) def debugcomplete(ui, cmd=b'', **opts): """returns the completion list associated with the given command""" if opts.get('options'): options = [] otables = [globalopts] if cmd: aliases, entry = cmdutil.findcmd(cmd, table, False) otables.append(entry[1]) for t in otables: for o in t: if b"(DEPRECATED)" in o[3]: continue if o[0]: options.append(b'-%s' % o[0]) options.append(b'--%s' % o[1]) ui.write(b"%s\n" % b"\n".join(options)) return cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table) if ui.verbose: cmdlist = [b' '.join(c[0]) for c in cmdlist.values()] ui.write(b"%s\n" % b"\n".join(sorted(cmdlist))) @command( b'diff', [ (b'r', b'rev', [], _(b'revision'), _(b'REV')), (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')), ] + diffopts + diffopts2 + walkopts + subrepoopts, _(b'[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'), helpcategory=command.CATEGORY_FILE_CONTENTS, helpbasic=True, inferrepo=True, intents={INTENT_READONLY}, ) def diff(ui, repo, *pats, **opts): """diff repository (or selected files) Show differences between revisions for the specified files. Differences between files are shown using the unified diff format. .. note:: :hg:`diff` may generate unexpected results for merges, as it will default to comparing against the working directory's first parent changeset if no revisions are specified. 
    When two revision arguments are given, then changes are shown between
    those revisions. If only one revision is specified then that revision is
    compared to the working directory, and, when no revisions are specified,
    the working directory files are compared to its first parent.

    Alternatively you can specify -c/--change with a revision to see the
    changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway, probably
    with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """
    # --rev and --change are mutually exclusive; checked on the native
    # (str) kwargs before byteskwargs conversion.
    cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev')
    change = opts.get(b'change')
    stat = opts.get(b'stat')
    reverse = opts.get(b'reverse')

    if change:
        # -c REV: diff REV against its first parent.
        repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
        ctx2 = scmutil.revsingle(repo, change, None)
        ctx1 = ctx2.p1()
    else:
        repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
        ctx1, ctx2 = scmutil.revpair(repo, revs)

    # --reverse swaps which side of the diff each context appears on.
    if reverse:
        ctxleft = ctx2
        ctxright = ctx1
    else:
        ctxleft = ctx1
        ctxright = ctx2

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(ctx2, pats, opts)
    m = repo.narrowmatch(m)
    ui.pager(b'diff')
    logcmdutil.diffordiffstat(
        ui,
        repo,
        diffopts,
        ctxleft,
        ctxright,
        m,
        stat=stat,
        listsubrepos=opts.get(b'subrepos'),
        root=opts.get(b'root'),
    )


@command(
    b'export',
    [
        (
            b'B',
            b'bookmark',
            b'',
            _(b'export changes only reachable by given bookmark'),
            _(b'BOOKMARK'),
        ),
        (
            b'o',
            b'output',
            b'',
            _(b'print output to file with formatted name'),
            _(b'FORMAT'),
        ),
        (b'', b'switch-parent', None, _(b'diff against the second parent')),
        (b'r', b'rev', [], _(b'revisions to export'), _(b'REV')),
    ]
    + diffopts
    + formatteropts,
    _(b'[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    helpbasic=True,
    intents={INTENT_READONLY},
)
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`.
    In addition to the common template keywords, the following formatting
    rules are supported:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number
    :``\\``: literal "\\" character

    Without the -a/--text option, export will avoid generating diffs of
    files it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With -B/--bookmark changesets reachable by the given bookmark are
    selected.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :diff:    String. Diff content.
      :parents: List of strings. Parent nodes of the changeset.

      Examples:

      - use export and import to transplant a bugfix to the current branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    bookmark = opts.get(b'bookmark')
    # positional REVs and -r REVs are merged into one list
    changesets += tuple(opts.get(b'rev', []))

    cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')

    if bookmark:
        if bookmark not in repo._bookmarks:
            raise error.Abort(_(b"bookmark '%s' not found") % bookmark)

        revs = scmutil.bookmarkrevs(repo, bookmark)
    else:
        if not changesets:
            # default to the working directory parent
            changesets = [b'.']

        repo = scmutil.unhidehashlikerevs(repo, changesets, b'nowarn')
        revs = scmutil.revrange(repo, changesets)

    if not revs:
        raise error.Abort(_(b"export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_(b'exporting patches:\n'))
    else:
        ui.note(_(b'exporting patch:\n'))

    fntemplate = opts.get(b'output')
    if cmdutil.isstdiofilename(fntemplate):
        fntemplate = b''

    if fntemplate:
        # writing to files: formatter output is discarded
        fm = formatter.nullformatter(ui, b'export', opts)
    else:
        ui.pager(b'export')
        fm = ui.formatter(b'export', opts)
    with fm:
        cmdutil.export(
            repo,
            revs,
            fm,
            fntemplate=fntemplate,
            switch_parent=opts.get(b'switch_parent'),
            opts=patch.diffallopts(ui, opts),
        )


@command(
    b'files',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'search the repository as it is in REV'),
            _(b'REV'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
    ]
    + walkopts
    + formatteropts
    + subrepoopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    intents={INTENT_READONLY},
)
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :flags:   String. Character denoting file's symlink and executable bits.
      :path:    String. Repository-absolute path of the file.
      :size:    Integer. Size of the file in bytes.
      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    # rev=None resolves to the working directory context
    ctx = scmutil.revsingle(repo, rev, None)

    # -0/--print0 uses NUL terminators for xargs-style consumers
    end = b'\n'
    if opts.get(b'print0'):
        end = b'\0'
    fmt = b'%s' + end

    m = scmutil.match(ctx, pats, opts)
    ui.pager(b'files')
    uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
    with ui.formatter(b'files', opts) as fm:
        return cmdutil.files(
            ui, ctx, m, uipathfn, fm, fmt, opts.get(b'subrepos')
        )


@command(
    b'forget',
    [(b'i', b'interactive', None, _(b'use interactive mode')),]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... FILE...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_(b'no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive')
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    # cmdutil.forget returns (rejected, forgotten); only the rejected list
    # matters for the exit code.
    rejected = cmdutil.forget(
        ui,
        repo,
        m,
        prefix=b"",
        uipathfn=uipathfn,
        explicitonly=False,
        dryrun=dryrun,
        interactive=interactive,
    )[0]
    return rejected and 1 or 0


@command(
    b'graft',
    [
        (b'r', b'rev', [], _(b'revisions to graft'), _(b'REV')),
        (
            b'',
            b'base',
            b'',
            _(b'base revision when doing the graft merge (ADVANCED)'),
            _(b'REV'),
        ),
        (b'c', b'continue', False, _(b'resume interrupted graft')),
        (b'', b'stop', False, _(b'stop interrupted graft')),
        (b'', b'abort', False, _(b'abort interrupted graft')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (b'', b'log', None, _(b'append graft info to log message')),
        (
            b'',
            b'no-commit',
            None,
            _(b"don't commit, just apply the changes in working directory"),
        ),
        (b'f', b'force', False, _(b'force graft')),
        (
            b'D',
            b'currentdate',
            False,
            _(b'record the current date as commit date'),
        ),
        (
            b'U',
            b'currentuser',
            False,
            _(b'record the current user as committer'),
        ),
    ]
    + commitopts2
    + mergetoolopts
    + dryrunopts,
    _(b'[OPTION]... [-r REV]... REV...'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of, or have been grafted to, the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    The -c/--continue option reapplies all the earlier options.

    .. container:: verbose

      The --base option exposes more of how graft internally uses merge with a
      custom base revision. --base can be used to specify another ancestor than
      the first and only parent.

      The command::

        hg graft -r 345 --base 234

      is thus pretty much the same as::

        hg diff -r 234 -r 345 | hg import

      but using merge to resolve conflicts and track moved files.

      The result of a merge can thus be backported as a single commit by
      specifying one of the merge parents as base, and thus effectively
      grafting the changes from the other side.

      It is also possible to collapse multiple changesets and clean up history
      by specifying another ancestor as base, much like rebase --collapse
      --keep.

      The commit message can be tweaked after the fact using commit --amend .

      For using non-ancestors as the base to backout changes, see the backout
      command and the hidden --parent option.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .
      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

      - backport the result of a merge as a single commit::

          hg graft -r 123 --base 123^

      - land a feature branch as one changeset::

          hg up -cr default
          hg graft -r featureX --base "ancestor('featureX', 'default')"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion, 1 if there are unresolved files.
    '''
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)


def _dograft(ui, repo, *revs, **opts):
    """Worker for :hg:`graft`; caller must hold the wlock.

    Handles --stop/--abort/--continue dispatch, duplicate/ancestor
    filtering, and the graft merge+commit loop.  Returns 0 on success,
    1 on unresolved conflicts, -1 when nothing is left to graft.
    """
    opts = pycompat.byteskwargs(opts)
    if revs and opts.get(b'rev'):
        ui.warn(
            _(
                b'warning: inconsistent use of --rev might give unexpected '
                b'revision ordering!\n'
            )
        )

    revs = list(revs)
    revs.extend(opts.get(b'rev'))
    # a dict of data to be stored in state file
    statedata = {}
    # list of new nodes created by ongoing graft
    statedata[b'newnodes'] = []

    cmdutil.resolvecommitoptions(ui, opts)

    editor = cmdutil.getcommiteditor(
        editform=b'graft', **pycompat.strkwargs(opts)
    )

    cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')

    cont = False
    if opts.get(b'no_commit'):
        cmdutil.check_incompatible_arguments(
            opts,
            b'no_commit',
            [b'edit', b'currentuser', b'currentdate', b'log'],
        )

    graftstate = statemod.cmdstate(repo, b'graftstate')

    if opts.get(b'stop'):
        cmdutil.check_incompatible_arguments(
            opts,
            b'stop',
            [
                b'edit',
                b'log',
                b'user',
                b'date',
                b'currentdate',
                b'currentuser',
                b'rev',
            ],
        )
        return _stopgraft(ui, repo, graftstate)
    elif opts.get(b'abort'):
        cmdutil.check_incompatible_arguments(
            opts,
            b'abort',
            [
                b'edit',
                b'log',
                b'user',
                b'date',
                b'currentdate',
                b'currentuser',
                b'rev',
            ],
        )
        return cmdutil.abortgraft(ui, repo, graftstate)
    elif opts.get(b'continue'):
        cont = True
        if revs:
            raise error.Abort(_(b"can't specify --continue and revisions"))
        # read in unfinished revisions
        if graftstate.exists():
            statedata = cmdutil.readgraftstate(repo, graftstate)
            # re-apply the options recorded when the graft was interrupted
            if statedata.get(b'date'):
                opts[b'date'] = statedata[b'date']
            if statedata.get(b'user'):
                opts[b'user'] = statedata[b'user']
            if statedata.get(b'log'):
                opts[b'log'] = True
            if statedata.get(b'no_commit'):
                opts[b'no_commit'] = statedata.get(b'no_commit')
            if statedata.get(b'base'):
                opts[b'base'] = statedata.get(b'base')
            nodes = statedata[b'nodes']
            revs = [repo[node].rev() for node in nodes]
        else:
            cmdutil.wrongtooltocontinue(repo, _(b'graft'))
    else:
        if not revs:
            raise error.Abort(_(b'no revisions specified'))
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    basectx = None
    if opts.get(b'base'):
        basectx = scmutil.revsingle(repo, opts[b'base'], None)
    if basectx is None:
        # check for merges
        for rev in repo.revs(b'%ld and merge()', revs):
            ui.warn(_(b'skipping ungraftable merge revision %d\n') % rev)
            skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1
    if basectx is not None and len(revs) != 1:
        raise error.Abort(_(b'only one revision allowed with --base '))

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get(b'force')) and basectx is None:
        # check for ancestors of dest branch
        ancestors = repo.revs(b'%ld & (::.)', revs)
        for rev in ancestors:
            ui.warn(_(b'skipping ancestor revision %d:%s\n') % (rev, repo[rev]))

        revs = [r for r in revs if r not in ancestors]

        if not revs:
            return -1

        # analyze revs for earlier grafts
        ids = {}
        for ctx in repo.set(b"%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get(b'source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug(b'scanning for duplicate grafts\n')

        # The only changesets we can be sure doesn't contain grafts of any
        # revs, are the ones that are common ancestors of *all* revs:
        for rev in repo.revs(b'only(%d,ancestor(%ld))', repo[b'.'].rev(), revs):
            ctx = repo[rev]
            n = ctx.extra().get(b'source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    r = None
                if r in revs:
                    ui.warn(
                        _(
                            b'skipping revision %d:%s '
                            b'(already grafted to %d:%s)\n'
                        )
                        % (r, repo[r], rev, ctx)
                    )
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(
                            _(
                                b'skipping already grafted revision %d:%s '
                                b'(%d:%s also has unknown origin %s)\n'
                            )
                            % (ids[n], repo[ids[n]], rev, ctx, n[:12])
                        )
                    else:
                        ui.warn(
                            _(
                                b'skipping already grafted revision %d:%s '
                                b'(%d:%s also has origin %d:%s)\n'
                            )
                            % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
                        )
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                if r in revs:
                    ui.warn(
                        _(
                            b'skipping already grafted revision %d:%s '
                            b'(was grafted from %d:%s)\n'
                        )
                        % (r, repo[r], rev, ctx)
                    )
                    revs.remove(r)
        if not revs:
            return -1

    if opts.get(b'no_commit'):
        statedata[b'no_commit'] = True

    if opts.get(b'base'):
        statedata[b'base'] = opts[b'base']

    for pos, ctx in enumerate(repo.set(b"%ld", revs)):
        desc = b'%d:%s "%s"' % (
            ctx.rev(),
            ctx,
            ctx.description().split(b'\n', 1)[0],
        )
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += b' (%s)' % b' '.join(names)
        ui.status(_(b'grafting %s\n') % desc)
        if opts.get(b'dry_run'):
            continue

        source = ctx.extra().get(b'source')
        extra = {}
        # record the graft origin chain in the new changeset's extras
        if source:
            extra[b'source'] = source
            extra[b'intermediate-source'] = ctx.hex()
        else:
            extra[b'source'] = ctx.hex()
        user = ctx.user()
        if opts.get(b'user'):
            user = opts[b'user']
            statedata[b'user'] = user
        date = ctx.date()
        if opts.get(b'date'):
            date = opts[b'date']
            statedata[b'date'] = date
        message = ctx.description()
        if opts.get(b'log'):
            message += b'\n(grafted from %s)' % ctx.hex()
            statedata[b'log'] = True

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
            base = ctx.p1() if basectx is None else basectx
            with ui.configoverride(overrides, b'graft'):
                stats = mergemod.graft(repo, ctx, base, [b'local', b'graft'])
            # report any conflicts
            if stats.unresolvedcount > 0:
                # write out state for --continue
                nodes = [repo[rev].hex() for rev in revs[pos:]]
                statedata[b'nodes'] = nodes
                stateversion = 1
                graftstate.save(stateversion, statedata)
                ui.error(_(b"abort: unresolved conflicts, can't continue\n"))
                ui.error(_(b"(use 'hg resolve' and 'hg graft --continue')\n"))
                return 1
        else:
            cont = False

        # commit if --no-commit is false
        if not opts.get(b'no_commit'):
            node = repo.commit(
                text=message, user=user, date=date, extra=extra, editor=editor
            )
            if node is None:
                ui.warn(
                    _(b'note: graft of %d:%s created no changes to commit\n')
                    % (ctx.rev(), ctx)
                )
            # checking that newnodes exist because old state files won't have it
            elif statedata.get(b'newnodes') is not None:
                statedata[b'newnodes'].append(node)

    # remove state when we complete successfully
    if not opts.get(b'dry_run'):
        graftstate.delete()

    return 0


def _stopgraft(ui, repo, graftstate):
    """stop the interrupted graft"""
    if not graftstate.exists():
        raise error.Abort(_(b"no interrupted graft found"))
    # discard the in-progress merge, keeping already-grafted commits
    pctx = repo[b'.']
    mergemod.clean_update(pctx)
    graftstate.delete()
    ui.status(_(b"stopped the interrupted graft\n"))
    ui.status(_(b"working directory is now at %s\n") % pctx.hex()[:12])
    return 0


statemod.addunfinished(
    b'graft',
    fname=b'graftstate',
    clearable=True,
    stopflag=True,
    continueflag=True,
    abortfunc=cmdutil.hgabortgraft,
    cmdhint=_(b"use 'hg graft --continue' or 'hg graft --stop' to stop"),
)


@command(
    b'grep',
    [
        (b'0', b'print0', None, _(b'end fields with NUL')),
        (b'', b'all', None, _(b'an alias to --diff (DEPRECATED)')),
        (
            b'',
            b'diff',
            None,
            _(
                b'search revision differences for when the pattern was added '
                b'or removed'
            ),
        ),
        (b'a', b'text', None, _(b'treat all files as text')),
        (
            b'f',
            b'follow',
            None,
            _(
                b'follow changeset history,'
                b' or file history across copies and renames'
            ),
        ),
        (b'i', b'ignore-case', None, _(b'ignore case when matching')),
        (
            b'l',
            b'files-with-matches',
            None,
            _(b'print only filenames and revisions that match'),
        ),
        (b'n', b'line-number', None, _(b'print matching line numbers')),
        (
            b'r',
            b'rev',
            [],
            _(b'search files changed within revision range'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            None,
            _(
                b'include all files in the changeset while grepping (DEPRECATED)'
            ),
        ),
        (b'u', b'user', None, _(b'list the author (long with -v)')),
        (b'd', b'date', None, _(b'list the date (short with -q)')),
    ]
    + formatteropts
    + walkopts,
    _(b'[--diff] [OPTION]... PATTERN [FILE]...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files

    Search the working directory or revision history for a regular
    expression in the specified files for the entire repository.

    By default, grep searches the repository files in the working directory
    and prints the files where it finds a match. To specify historical
    revisions instead of the working directory, use the --rev flag.

    To search instead historical revision differences that contains a
    change in match status ("-" for a match that becomes a non-match, or "+"
    for a non-match that becomes a match), use the --diff flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.
    If no FILEs are specified and the --rev flag isn't supplied, all files
    in the working directory are searched. When using the --rev flag and
    specifying FILEs, use the --follow argument to also follow the specified
    FILEs across renames and copies.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :change:  String. Character denoting insertion ``+`` or removal ``-``.
                Available if ``--diff`` is specified.
      :lineno:  Integer. Line number of the match.
      :path:    String. Repository-absolute path of the file.
      :texts:   List of text chunks.

      And each entry of ``{texts}`` provides the following sub-keywords.

      :matched: Boolean. True if the chunk matches the specified pattern.
      :text:    String. Chunk content.

      See :hg:`help templates.operators` for the list expansion syntax.

    Returns 0 if a match is found, 1 otherwise.
    """
    cmdutil.check_incompatible_arguments(opts, 'all_files', ['all', 'diff'])
    opts = pycompat.byteskwargs(opts)
    # --all is a deprecated alias for --diff
    diff = opts.get(b'all') or opts.get(b'diff')
    follow = opts.get(b'follow')
    if opts.get(b'all_files') is None and not diff:
        opts[b'all_files'] = True
    # plain grep: no history involved, search the working directory only
    plaingrep = (
        opts.get(b'all_files')
        and not opts.get(b'rev')
        and not opts.get(b'follow')
    )
    all_files = opts.get(b'all_files')
    if plaingrep:
        opts[b'rev'] = [b'wdir()']

    reflags = re.M
    if opts.get(b'ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(
            _(b"grep: invalid match pattern: %s\n") % pycompat.bytestr(inst)
        )
        return 1
    sep, eol = b':', b'\n'
    if opts.get(b'print0'):
        sep = eol = b'\0'

    searcher = grepmod.grepsearcher(
        ui, repo, regexp, all_files=all_files, diff=diff, follow=follow
    )

    getfile = searcher._getfile

    uipathfn = scmutil.getuipathfn(repo)

    def display(fm, fn, ctx, pstates, states):
        """Emit the matches for one (file, ctx) pair; returns True if any."""
        rev = scmutil.intrev(ctx)
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = pycompat.bytestr
        if ui.quiet:
            datefmt = b'%Y-%m-%d'
        else:
            datefmt = b'%a %b %d %H:%M:%S %Y %1%2'
        found = False

        @util.cachefunc
        def binary():
            flog = getfile(fn)
            try:
                return stringutil.binary(flog.read(ctx.filenode(fn)))
            except error.WdirUnsupported:
                return ctx[fn].isbinary()

        fieldnamemap = {b'linenumber': b'lineno'}
        if diff:
            iter = grepmod.difflinestates(pstates, states)
        else:
            iter = [(b'', l) for l in states]
        for change, l in iter:
            fm.startitem()
            fm.context(ctx=ctx)
            fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
            fm.plain(uipathfn(fn), label=b'grep.filename')

            # columns are (name, format, data, condition, extra label);
            # a column is written only when its condition holds
            cols = [
                (b'rev', b'%d', rev, not plaingrep, b''),
                (
                    b'linenumber',
                    b'%d',
                    l.linenum,
                    opts.get(b'line_number'),
                    b'',
                ),
            ]
            if diff:
                cols.append(
                    (
                        b'change',
                        b'%s',
                        change,
                        True,
                        b'grep.inserted '
                        if change == b'+'
                        else b'grep.deleted ',
                    )
                )
            cols.extend(
                [
                    (
                        b'user',
                        b'%s',
                        formatuser(ctx.user()),
                        opts.get(b'user'),
                        b'',
                    ),
                    (
                        b'date',
                        b'%s',
                        fm.formatdate(ctx.date(), datefmt),
                        opts.get(b'date'),
                        b'',
                    ),
                ]
            )
            for name, fmt, data, cond, extra_label in cols:
                if cond:
                    fm.plain(sep, label=b'grep.sep')
                field = fieldnamemap.get(name, name)
                label = extra_label + (b'grep.%s' % name)
                fm.condwrite(cond, field, fmt, data, label=label)
            if not opts.get(b'files_with_matches'):
                fm.plain(sep, label=b'grep.sep')
                if not opts.get(b'text') and binary():
                    fm.plain(_(b" Binary file matches"))
                else:
                    displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
            fm.plain(eol)
            found = True
            if opts.get(b'files_with_matches'):
                # -l: one line per file is enough
                break
        return found

    def displaymatches(fm, l):
        """Write one matched line, labelling the matching spans."""
        p = 0
        for s, e in l.findpos(regexp):
            if p < s:
                fm.startitem()
                fm.write(b'text', b'%s', l.line[p:s])
                fm.data(matched=False)
            fm.startitem()
            fm.write(b'text', b'%s', l.line[s:e], label=b'grep.match')
            fm.data(matched=True)
            p = e
        if p < len(l.line):
            fm.startitem()
            fm.write(b'text', b'%s', l.line[p:])
            fm.data(matched=False)
        fm.end()

    found = False

    wopts = logcmdutil.walkopts(
        pats=pats,
        opts=opts,
        revspec=opts[b'rev'],
        include_pats=opts[b'include'],
        exclude_pats=opts[b'exclude'],
        follow=follow,
        force_changelog_traversal=all_files,
        filter_revisions_by_pats=not all_files,
    )
    revs, makefilematcher = logcmdutil.makewalker(repo, wopts)

    ui.pager(b'grep')
    fm = ui.formatter(b'grep', opts)
    for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
        r = display(fm, fn, ctx, pstates, states)
        found = found or r
        if r and not diff and not all_files:
            searcher.skipfile(fn, ctx.rev())
    fm.end()

    return not found


@command(
    b'heads',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'show only heads which are descendants of STARTREV'),
            _(b'STARTREV'),
        ),
        (b't', b'topo', False, _(b'show topological heads only')),
        (
            b'a',
            b'active',
            False,
            _(b'show active branchheads only (DEPRECATED)'),
        ),
        (b'c', b'closed', False, _(b'show normal and closed branch heads')),
    ]
    + templateopts,
    _(b'[-ct] [-r STARTREV] [REV]...'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    intents={INTENT_READONLY},
)
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
""" opts = pycompat.byteskwargs(opts) start = None rev = opts.get(b'rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') start = scmutil.revsingle(repo, rev, None).node() if opts.get(b'topo'): heads = [repo[h] for h in repo.heads(start)] else: heads = [] for branch in repo.branchmap(): heads += repo.branchheads(branch, start, opts.get(b'closed')) heads = [repo[h] for h in heads] if branchrevs: branches = { repo[r].branch() for r in scmutil.revrange(repo, branchrevs) } heads = [h for h in heads if h.branch() in branches] if opts.get(b'active') and branchrevs: dagheads = repo.heads(start) heads = [h for h in heads if h.node() in dagheads] if branchrevs: haveheads = {h.branch() for h in heads} if branches - haveheads: headless = b', '.join(b for b in branches - haveheads) msg = _(b'no open branch heads found on branches %s') if opts.get(b'rev'): msg += _(b' (started at %s)') % opts[b'rev'] ui.warn((msg + b'\n') % headless) if not heads: return 1 ui.pager(b'heads') heads = sorted(heads, key=lambda x: -(x.rev())) displayer = logcmdutil.changesetdisplayer(ui, repo, opts) for ctx in heads: displayer.show(ctx) displayer.close() @command( b'help', [ (b'e', b'extension', None, _(b'show only help for extensions')), (b'c', b'command', None, _(b'show only help for commands')), (b'k', b'keyword', None, _(b'show topics matching keyword')), ( b's', b'system', [], _(b'show help for specific platform(s)'), _(b'PLATFORM'), ), ], _(b'[-eck] [-s PLATFORM] [TOPIC]'), helpcategory=command.CATEGORY_HELP, norepo=True, intents={INTENT_READONLY}, ) def help_(ui, name=None, **opts): """show help for a given topic or a help overview With no arguments, print a list of commands with short help messages. Given a topic, extension, or command name, print help for that topic. Returns 0 if successful. 
""" keep = opts.get('system') or [] if len(keep) == 0: if pycompat.sysplatform.startswith(b'win'): keep.append(b'windows') elif pycompat.sysplatform == b'OpenVMS': keep.append(b'vms') elif pycompat.sysplatform == b'plan9': keep.append(b'plan9') else: keep.append(b'unix') keep.append(pycompat.sysplatform.lower()) if ui.verbose: keep.append(b'verbose') commands = sys.modules[__name__] formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts) ui.pager(b'help') ui.write(formatted) @command( b'identify|id', [ (b'r', b'rev', b'', _(b'identify the specified revision'), _(b'REV')), (b'n', b'num', None, _(b'show local revision number')), (b'i', b'id', None, _(b'show global revision id')), (b'b', b'branch', None, _(b'show branch')), (b't', b'tags', None, _(b'show tags')), (b'B', b'bookmarks', None, _(b'show bookmarks')), ] + remoteopts + formatteropts, _(b'[-nibtB] [-r REV] [SOURCE]'), helpcategory=command.CATEGORY_CHANGE_NAVIGATION, optionalrepo=True, intents={INTENT_READONLY}, ) def identify( ui, repo, source=None, rev=None, num=None, id=None, branch=None, tags=None, bookmarks=None, **opts ): """identify the working directory or specified revision Print a summary identifying the repository state at REV using one or two parent hash identifiers, followed by a "+" if the working directory has uncommitted changes, the branch name (if not default), a list of tags, and a list of bookmarks. When REV is not given, print a summary of the current state of the repository including the working directory. Specify -r. to get information of the working directory parent without scanning uncommitted changes. Specifying a path to a repository root or Mercurial bundle will cause lookup to operate on that repository/bundle. .. container:: verbose Template: The following keywords are supported in addition to the common template keywords and functions. See also :hg:`help templates`. :dirty: String. Character ``+`` denoting if the working directory has uncommitted changes. 
      :id:      String. One or two nodes, optionally followed by ``+``.
      :parents: List of strings. Parent nodes of the changeset.

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip https://www.mercurial-scm.org/repo/hg/

    See :hg:`log` for generating more information about specific revisions,
    including full hash identifiers.

    Returns 0 if successful.
    """
    opts = pycompat.byteskwargs(opts)
    if not repo and not source:
        raise error.Abort(
            _(b"there is no Mercurial repository here (.hg not found)")
        )

    # with no selector flags, print the default summary (id + decorations)
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source)  # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    fm = ui.formatter(b'identify', opts)
    fm.startitem()

    if not repo:
        # remote peer: only the hash and bookmarks can be queried
        if num or branch or tags:
            raise error.Abort(
                _(b"can't query remote revision number, branch, or tags")
            )
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = b"tip"

        remoterev = peer.lookup(rev)
        hexrev = fm.hexfunc(remoterev)
        if default or id:
            output = [hexrev]
        fm.data(id=hexrev)

        @util.cachefunc
        def getbms():
            # bookmarks on the remote that point at the identified node
            bms = []

            if b'bookmarks' in peer.listkeys(b'namespaces'):
                hexremoterev = hex(remoterev)
                bms = [
                    bm
                    for bm, bmr in pycompat.iteritems(
                        peer.listkeys(b'bookmarks')
                    )
                    if bmr == hexremoterev
                ]

            return sorted(bms)

        if fm.isplain():
            if bookmarks:
                output.extend(getbms())
            elif default and not ui.quiet:
                # multiple bookmarks for a single parent separated by '/'
                bm = b'/'.join(getbms())
                if bm:
                    output.append(bm)
        else:
            fm.data(node=hex(remoterev))
            if bookmarks or b'bookmarks' in fm.datahint():
                fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
    else:
        if rev:
            repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
        ctx = scmutil.revsingle(repo, rev, None)

        if ctx.rev() is None:
            # working directory: report both parents plus a '+' when dirty
            ctx = repo[None]
            parents = ctx.parents()
            taglist = []
            for p in parents:
                taglist.extend(p.tags())

            dirty = b""
            if ctx.dirty(missing=True, merge=False, branch=False):
                dirty = b'+'
            fm.data(dirty=dirty)

            hexoutput = [fm.hexfunc(p.node()) for p in parents]
            if default or id:
                output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
            fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))

            if num:
                numoutput = [b"%d" % p.rev() for p in parents]
                output.append(b"%s%s" % (b'+'.join(numoutput), dirty))

            fm.data(
                parents=fm.formatlist(
                    [fm.hexfunc(p.node()) for p in parents], name=b'node'
                )
            )
        else:
            hexoutput = fm.hexfunc(ctx.node())
            if default or id:
                output = [hexoutput]
            fm.data(id=hexoutput)

            if num:
                output.append(pycompat.bytestr(ctx.rev()))
            taglist = ctx.tags()

        if default and not ui.quiet:
            b = ctx.branch()
            if b != b'default':
                output.append(b"(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = b'/'.join(taglist)
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = b'/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(taglist)

            if bookmarks:
                output.extend(ctx.bookmarks())

        fm.data(node=ctx.hex())
        fm.data(branch=ctx.branch())
        fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
        fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
        fm.context(ctx=ctx)

    fm.plain(b"%s\n" % b' '.join(output))
    fm.end()


@command( b'import|patch', [ ( b'p', b'strip', 1, _( b'directory strip option for patch. 
This has the same ' b'meaning as the corresponding patch option' ), _(b'NUM'), ), (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')), (b'', b'secret', None, _(b'use the secret phase for committing')), (b'e', b'edit', False, _(b'invoke editor on commit messages')), ( b'f', b'force', None, _(b'skip check for outstanding uncommitted changes (DEPRECATED)'), ), ( b'', b'no-commit', None, _(b"don't commit, just update the working directory"), ), ( b'', b'bypass', None, _(b"apply patch without touching the working directory"), ), (b'', b'partial', None, _(b'commit even if some hunks fail')), (b'', b'exact', None, _(b'abort if patch would apply lossily')), (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')), ( b'', b'import-branch', None, _(b'use any branch information in patch (implied by --exact)'), ), ] + commitopts + commitopts2 + similarityopts, _(b'[OPTION]... PATCH...'), helpcategory=command.CATEGORY_IMPORT_EXPORT, ) def import_(ui, repo, patch1=None, *patches, **opts): """import an ordered set of patches Import a list of patches and commit them individually (unless --no-commit is specified). To read a patch from standard input (stdin), use "-" as the patch name. If a URL is specified, the patch will be downloaded from there. Import first applies changes to the working directory (unless --bypass is specified), import will abort if there are outstanding changes. Use --bypass to apply and commit patches directly to the repository, without affecting the working directory. Without --exact, patches will be applied on top of the working directory parent revision. You can import a patch straight from a mail message. Even patches as attachments work (to use the body part, it must have type text/plain or text/x-patch). From and Subject headers of email message are used as default committer and commit message. All text/plain body parts before first diff are added to the commit message. 
If the imported patch was generated by :hg:`export`, user and description from patch override values from message headers and body. Values given on command line with -m/--message and -u/--user override these. If --exact is specified, import will set the working directory to the parent of each patch before applying it, and will abort if the resulting changeset has a different ID than the one recorded in the patch. This will guard against various ways that portable patch formats and mail systems might fail to transfer Mercurial data or metadata. See :hg:`bundle` for lossless transmission. Use --partial to ensure a changeset will be created from the patch even if some hunks fail to apply. Hunks that fail to apply will be written to a .rej file. Conflicts can then be resolved by hand before :hg:`commit --amend` is run to update the created changeset. This flag exists to let people import patches that partially apply without losing the associated metadata (author, date, description, ...). .. note:: When no hunks apply cleanly, :hg:`import --partial` will create an empty changeset, importing only the patch metadata. With -s/--similarity, hg will attempt to discover renames and copies in the patch in the same way as :hg:`addremove`. It is possible to use external patch programs to perform the patch by setting the ``ui.patch`` configuration option. For the default internal tool, the fuzz can also be configured via ``patch.fuzz``. See :hg:`help config` for more information about configuration files and how to use these options. See :hg:`help dates` for a list of formats valid for -d/--date. .. 
container:: verbose Examples: - import a traditional patch from a website and detect renames:: hg import -s 80 http://example.com/bugfix.patch - import a changeset from an hgweb server:: hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa - import all the patches in an Unix-style mbox:: hg import incoming-patches.mbox - import patches from stdin:: hg import - - attempt to exactly restore an exported changeset (not always possible):: hg import --exact proposed-fix.patch - use an external tool to apply a patch which is too fuzzy for the default internal tool. hg import --config ui.patch="patch --merge" fuzzy.patch - change the default fuzzing from 2 to a less strict 7 hg import --config ui.fuzz=7 fuzz.patch Returns 0 on success, 1 on partial success (see --partial). """ cmdutil.check_incompatible_arguments( opts, 'no_commit', ['bypass', 'secret'] ) cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix']) opts = pycompat.byteskwargs(opts) if not patch1: raise error.Abort(_(b'need at least one patch to import')) patches = (patch1,) + patches date = opts.get(b'date') if date: opts[b'date'] = dateutil.parsedate(date) exact = opts.get(b'exact') update = not opts.get(b'bypass') try: sim = float(opts.get(b'similarity') or 0) except ValueError: raise error.Abort(_(b'similarity must be a number')) if sim < 0 or sim > 100: raise error.Abort(_(b'similarity must be between 0 and 100')) if sim and not update: raise error.Abort(_(b'cannot use --similarity with --bypass')) base = opts[b"base"] msgs = [] ret = 0 with repo.wlock(): if update: cmdutil.checkunfinished(repo) if exact or not opts.get(b'force'): cmdutil.bailifchanged(repo) if not opts.get(b'no_commit'): lock = repo.lock tr = lambda: repo.transaction(b'import') dsguard = util.nullcontextmanager else: lock = util.nullcontextmanager tr = util.nullcontextmanager dsguard = lambda: dirstateguard.dirstateguard(repo, b'import') with lock(), tr(), dsguard(): parents = repo[None].parents() for patchurl 
@command(
    b'incoming|in',
    [
        (
            b'f',
            b'force',
            None,
            _(b'run even if remote repository is unrelated'),
        ),
        (b'n', b'newest-first', None, _(b'show newest record first')),
        (b'', b'bundle', b'', _(b'file to store the bundles into'), _(b'FILE')),
        (
            b'r',
            b'rev',
            [],
            _(b'a remote changeset intended to be added'),
            _(b'REV'),
        ),
        (b'B', b'bookmarks', False, _(b"compare bookmarks")),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to pull'),
            _(b'BRANCH'),
        ),
    ]
    + logopts
    + remoteopts
    + subrepoopts,
    _(b'[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
)
def incoming(ui, repo, source=b"default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    by :hg:`pull` at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2               1234567890ab advanced
        BM3               234567890abc diverged
        BM4               34567890abcd changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmark
      existing only in the remote repository are treated as ``added``,
      even if it is in fact locally deleted.

    .. container:: verbose

      For remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'graph'):
        logcmdutil.checkunsupportedgraphflags([], opts)

        # Callback handed to hg._incoming: render the fetched changesets
        # as an ASCII DAG instead of a flat list.
        def display(other, chlist, displayer):
            revdag = logcmdutil.graphrevs(other, chlist, opts)
            logcmdutil.displaygraph(
                ui, repo, revdag, displayer, graphmod.asciiedges
            )

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle'])

    if opts.get(b'bookmarks'):
        source, branches = hg.parseurl(
            ui.expandpath(source), opts.get(b'branch')
        )
        other = hg.peer(repo, opts, source)
        if b'bookmarks' not in other.listkeys(b'namespaces'):
            ui.warn(_(b"remote doesn't support bookmarks\n"))
            return 0
        ui.pager(b'incoming')
        ui.status(_(b'comparing with %s\n') % util.hidepassword(source))
        return bookmarks.incoming(ui, repo, other)

    # _subtoppath lets subrepo code resolve relative pull sources while
    # hg.incoming runs; always cleaned up afterwards.
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
""" opts = pycompat.byteskwargs(opts) if opts.get(b'graph'): logcmdutil.checkunsupportedgraphflags([], opts) def display(other, chlist, displayer): revdag = logcmdutil.graphrevs(other, chlist, opts) logcmdutil.displaygraph( ui, repo, revdag, displayer, graphmod.asciiedges ) hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True) return 0 cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle']) if opts.get(b'bookmarks'): source, branches = hg.parseurl( ui.expandpath(source), opts.get(b'branch') ) other = hg.peer(repo, opts, source) if b'bookmarks' not in other.listkeys(b'namespaces'): ui.warn(_(b"remote doesn't support bookmarks\n")) return 0 ui.pager(b'incoming') ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) return bookmarks.incoming(ui, repo, other) repo._subtoppath = ui.expandpath(source) try: return hg.incoming(ui, repo, source, opts) finally: del repo._subtoppath @command( b'init', remoteopts, _(b'[-e CMD] [--remotecmd CMD] [DEST]'), helpcategory=command.CATEGORY_REPO_CREATION, helpbasic=True, norepo=True, ) def init(ui, dest=b".", **opts): """create a new repository in the given directory Initialize a new repository in the given directory. If the given directory does not exist, it will be created. If no directory is given, the current directory is used. It is possible to specify an ``ssh://`` URL as the destination. See :hg:`help urls` for more information. Returns 0 on success. """ opts = pycompat.byteskwargs(opts) hg.peer(ui, opts, ui.expandpath(dest), create=True) @command( b'locate', [ ( b'r', b'rev', b'', _(b'search the repository as it is in REV'), _(b'REV'), ), ( b'0', b'print0', None, _(b'end filenames with NUL, for use with xargs'), ), ( b'f', b'fullpath', None, _(b'print complete paths from the filesystem root'), ), ] + walkopts, _(b'[OPTION]... 
@command(
    b'locate',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'search the repository as it is in REV'),
            _(b'REV'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
        (
            b'f',
            b'fullpath',
            None,
            _(b'print complete paths from the filesystem root'),
        ),
    ]
    + walkopts,
    _(b'[OPTION]... [PATTERN]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'print0'):
        end = b'\0'
    else:
        end = b'\n'
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    ret = 1
    # badfn always returns False, i.e. bad-path callbacks are swallowed
    # silently rather than reported.
    m = scmutil.match(
        ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False
    )

    ui.pager(b'locate')
    if ctx.rev() is None:
        # When run on the working copy, "locate" includes removed files, so
        # we get the list of files from the dirstate.
        filesgen = sorted(repo.dirstate.matches(m))
    else:
        filesgen = ctx.matches(m)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats))
    for abs in filesgen:
        if opts.get(b'fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(uipathfn(abs), end)
        ret = 0

    return ret
@command(
    b'log|history',
    [
        (
            b'f',
            b'follow',
            None,
            _(
                b'follow changeset history, or file history across copies and renames'
            ),
        ),
        (
            b'',
            b'follow-first',
            None,
            _(b'only follow the first parent of merge changesets (DEPRECATED)'),
        ),
        (
            b'd',
            b'date',
            b'',
            _(b'show revisions matching date spec'),
            _(b'DATE'),
        ),
        (b'C', b'copies', None, _(b'show copied files')),
        (
            b'k',
            b'keyword',
            [],
            _(b'do case-insensitive search for a given text'),
            _(b'TEXT'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'show the specified revision or revset'),
            _(b'REV'),
        ),
        (
            b'L',
            b'line-range',
            [],
            _(b'follow line range of specified file (EXPERIMENTAL)'),
            _(b'FILE,RANGE'),
        ),
        (
            b'',
            b'removed',
            None,
            _(b'include revisions where files were removed'),
        ),
        (
            b'm',
            b'only-merges',
            None,
            _(b'show only merges (DEPRECATED) (use -r "merge()" instead)'),
        ),
        (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
        (
            b'',
            b'only-branch',
            [],
            _(
                b'show only changesets within the given named branch (DEPRECATED)'
            ),
            _(b'BRANCH'),
        ),
        (
            b'b',
            b'branch',
            [],
            _(b'show changesets within the given named branch'),
            _(b'BRANCH'),
        ),
        (
            b'P',
            b'prune',
            [],
            _(b'do not display revision or any of its ancestors'),
            _(b'REV'),
        ),
    ]
    + logopts
    + walkopts,
    _(b'[OPTION]... [FILE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    helpbasic=True,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, '%' is a changeset
    involved in an unresolved merge conflict, '_' closes a branch,
    'x' is obsolete, '*' is unstable, and '+' represents a fork where the
    changeset from the lines below is a parent of the 'o' merge on the same
    line.
    Paths in the DAG are represented with '|', '/' and so forth. ':' in place
    of a '|' indicates one or more revisions in a path are omitted.

    .. container:: verbose

       Use -L/--line-range FILE,M:N options to follow the history of lines
       from M to N in FILE. With -p/--patch only diff hunks affecting
       specified line range will be shown. This option requires --follow;
       it can be specified multiple times. Currently, this option is not
       compatible with --graph. This option is experimental.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

       .. note::

          The history resulting from -L/--line-range options depends on diff
          options; for instance if white-spaces are ignored, respective changes
          with only white-spaces in specified line range will not be listed.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

      - changesets touching lines 13 to 23 for file.c::

          hg log -L file.c,13:23

      - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
        main.c with patch::

          hg log -L file.c,13:23 -L main.c,2:6 -p

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates. The default template used by the log
    command can be customized via the ``command-templates.log`` configuration
    setting.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    linerange = opts.get(b'line_range')

    if linerange and not opts.get(b'follow'):
        raise error.Abort(_(b'--line-range requires --follow'))

    if linerange and pats:
        # TODO: take pats as patterns with no line-range filter
        raise error.Abort(
            _(b'FILE arguments are not compatible with --line-range option')
        )

    repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
    revs, differ = logcmdutil.getrevs(
        repo, logcmdutil.parseopts(ui, pats, opts)
    )
    if linerange:
        # TODO: should follow file history from logcmdutil._initialrevs(),
        # then filter the result by logcmdutil._makerevset() and --limit
        revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)

    getcopies = None
    if opts.get(b'copies'):
        endrev = None
        if revs:
            endrev = revs.max() + 1
        getcopies = scmutil.getcopiesfn(repo, endrev=endrev)

    ui.pager(b'log')
    displayer = logcmdutil.changesetdisplayer(
        ui, repo, opts, differ, buffered=True
    )
    # Graph and flat output share the displayer; only the driver differs.
    if opts.get(b'graph'):
        displayfn = logcmdutil.displaygraphrevs
    else:
        displayfn = logcmdutil.displayrevs
    displayfn(ui, repo, revs, displayer, getcopies)
""" opts = pycompat.byteskwargs(opts) linerange = opts.get(b'line_range') if linerange and not opts.get(b'follow'): raise error.Abort(_(b'--line-range requires --follow')) if linerange and pats: # TODO: take pats as patterns with no line-range filter raise error.Abort( _(b'FILE arguments are not compatible with --line-range option') ) repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn') revs, differ = logcmdutil.getrevs( repo, logcmdutil.parseopts(ui, pats, opts) ) if linerange: # TODO: should follow file history from logcmdutil._initialrevs(), # then filter the result by logcmdutil._makerevset() and --limit revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts) getcopies = None if opts.get(b'copies'): endrev = None if revs: endrev = revs.max() + 1 getcopies = scmutil.getcopiesfn(repo, endrev=endrev) ui.pager(b'log') displayer = logcmdutil.changesetdisplayer( ui, repo, opts, differ, buffered=True ) if opts.get(b'graph'): displayfn = logcmdutil.displaygraphrevs else: displayfn = logcmdutil.displayrevs displayfn(ui, repo, revs, displayer, getcopies) @command( b'manifest', [ (b'r', b'rev', b'', _(b'revision to display'), _(b'REV')), (b'', b'all', False, _(b"list files from all revisions")), ] + formatteropts, _(b'[-r REV]'), helpcategory=command.CATEGORY_MAINTENANCE, intents={INTENT_READONLY}, ) def manifest(ui, repo, node=None, rev=None, **opts): """output the current or given revision of the project manifest Print a list of version controlled files for the given revision. If no revision is given, the first parent of the working directory is used, or the null revision if no revision is checked out. With -v, print file permissions, symlink and executable bits. With --debug, print file revision hashes. If option --all is specified, the list of all files from all revisions is printed. This includes deleted and renamed files. Returns 0 on success. 
""" opts = pycompat.byteskwargs(opts) fm = ui.formatter(b'manifest', opts) if opts.get(b'all'): if rev or node: raise error.Abort(_(b"can't specify a revision with --all")) res = set() for rev in repo: ctx = repo[rev] res |= set(ctx.files()) ui.pager(b'manifest') for f in sorted(res): fm.startitem() fm.write(b"path", b'%s\n', f) fm.end() return if rev and node: raise error.Abort(_(b"please specify just one revision")) if not node: node = rev char = {b'l': b'@', b'x': b'*', b'': b'', b't': b'd'} mode = {b'l': b'644', b'x': b'755', b'': b'644', b't': b'755'} if node: repo = scmutil.unhidehashlikerevs(repo, [node], b'nowarn') ctx = scmutil.revsingle(repo, node) mf = ctx.manifest() ui.pager(b'manifest') for f in ctx: fm.startitem() fm.context(ctx=ctx) fl = ctx[f].flags() fm.condwrite(ui.debugflag, b'hash', b'%s ', hex(mf[f])) fm.condwrite(ui.verbose, b'mode type', b'%s %1s ', mode[fl], char[fl]) fm.write(b'path', b'%s\n', f) fm.end() @command( b'merge', [ ( b'f', b'force', None, _(b'force a merge including outstanding changes (DEPRECATED)'), ), (b'r', b'rev', b'', _(b'revision to merge'), _(b'REV')), ( b'P', b'preview', None, _(b'review revisions to merge (no merge is performed)'), ), (b'', b'abort', None, _(b'abort the ongoing merge')), ] + mergetoolopts, _(b'[-P] [[-r] REV]'), helpcategory=command.CATEGORY_CHANGE_MANAGEMENT, helpbasic=True, ) def merge(ui, repo, node=None, **opts): """merge another revision into working directory The current working directory is updated with all changes made in the requested revision since the last common predecessor revision. Files that changed between either parent are marked as changed for the next commit and a commit must be performed before any further updates to the repository are allowed. The next commit will have two parents. ``--tool`` can be used to specify the merge tool used for file merges. It overrides the HGMERGE environment variable and your configuration files. See :hg:`help merge-tools` for options. 
If no revision is specified, the working directory's parent is a head revision, and the current branch contains exactly one other head, the other head is merged with by default. Otherwise, an explicit revision with which to merge must be provided. See :hg:`help resolve` for information on handling file conflicts. To undo an uncommitted merge, use :hg:`merge --abort` which will check out a clean copy of the original merge parent, losing all changes. Returns 0 on success, 1 if there are unresolved files. """ opts = pycompat.byteskwargs(opts) abort = opts.get(b'abort') if abort and repo.dirstate.p2() == nullid: cmdutil.wrongtooltocontinue(repo, _(b'merge')) cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview']) if abort: state = cmdutil.getunfinishedstate(repo) if state and state._opname != b'merge': raise error.Abort( _(b'cannot abort merge with %s in progress') % (state._opname), hint=state.hint(), ) if node: raise error.Abort(_(b"cannot specify a node with --abort")) return hg.abortmerge(repo.ui, repo) if opts.get(b'rev') and node: raise error.Abort(_(b"please specify just one revision")) if not node: node = opts.get(b'rev') if node: ctx = scmutil.revsingle(repo, node) else: if ui.configbool(b'commands', b'merge.require-rev'): raise error.Abort( _( b'configuration requires specifying revision to merge ' b'with' ) ) ctx = repo[destutil.destmerge(repo)] if ctx.node() is None: raise error.Abort(_(b'merging with the working copy has no effect')) if opts.get(b'preview'): # find nodes that are ancestors of p2 but not of p1 p1 = repo[b'.'].node() p2 = ctx.node() nodes = repo.changelog.findmissing(common=[p1], heads=[p2]) displayer = logcmdutil.changesetdisplayer(ui, repo, opts) for node in nodes: displayer.show(repo[node]) displayer.close() return 0 # ui.forcemerge is an internal variable, do not document overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')} with ui.configoverride(overrides, b'merge'): force = opts.get(b'force') labels = 
# Register 'merge' as an unfinished multi-step state so generic commands
# like 'hg abort' / 'hg continue' know how to handle an interrupted merge.
statemod.addunfinished(
    b'merge',
    fname=None,
    clearable=True,
    allowcommit=True,
    cmdmsg=_(b'outstanding uncommitted merge'),
    abortfunc=hg.abortmerge,
    statushint=_(b'To continue: hg commit\nTo abort: hg merge --abort'),
    cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"),
)


@command(
    b'outgoing|out',
    [
        (
            b'f',
            b'force',
            None,
            _(b'run even when the destination is unrelated'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'a changeset intended to be included in the destination'),
            _(b'REV'),
        ),
        (b'n', b'newest-first', None, _(b'show newest record first')),
        (b'B', b'bookmarks', False, _(b'compare bookmarks')),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to push'),
            _(b'BRANCH'),
        ),
    ]
    + logopts
    + remoteopts
    + subrepoopts,
    _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
)
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    # hg._outgoing() needs to re-resolve the path in order to handle #branch
    # style URLs, so don't overwrite dest.
    path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
    if not path:
        raise error.Abort(
            _(b'default repository not configured!'),
            hint=_(b"see 'hg help config.paths'"),
        )

    opts = pycompat.byteskwargs(opts)
    if opts.get(b'graph'):
        logcmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = logcmdutil.graphrevs(repo, o, opts)
        ui.pager(b'outgoing')
        displayer = logcmdutil.changesetdisplayer(
            ui, repo, opts, buffered=True
        )
        logcmdutil.displaygraph(
            ui, repo, revdag, displayer, graphmod.asciiedges
        )
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get(b'bookmarks'):
        dest = path.pushloc or path.loc
        other = hg.peer(repo, opts, dest)
        if b'bookmarks' not in other.listkeys(b'namespaces'):
            ui.warn(_(b"remote doesn't support bookmarks\n"))
            return 0
        ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
        ui.pager(b'outgoing')
        return bookmarks.outgoing(ui, repo, other)

    # _subtoppath lets subrepo code resolve the push destination while
    # hg.outgoing runs; always cleaned up afterwards.
    repo._subtoppath = path.pushloc or path.loc
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
@command(
    b'parents',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'show parents of the specified revision'),
            _(b'REV'),
        ),
    ]
    + templateopts,
    _(b'[-r REV] [FILE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    inferrepo=True,
)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = scmutil.revsingle(repo, rev, None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_(b'can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        # collect the file's node in each parent that actually has it;
        # a parent lacking the file (LookupError) is simply skipped
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise error.Abort(_(b"'%s' not found in manifest!") % file_)
        p = []
        for fn in filenodes:
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
@command(
    b'paths',
    formatteropts,
    _(b'[NAME]'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    optionalrepo=True,
    intents={INTENT_READONLY},
)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning.  When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both.  When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    .. container:: verbose

      Template:

      The following keywords are supported. See also :hg:`help templates`.

      :name:    String. Symbolic name of the path alias.
      :pushurl: String. URL for push operations.
      :url:     String. URL or directory path for the other operations.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    ui.pager(b'paths')
    if search:
        pathitems = [
            (name, path)
            for name, path in pycompat.iteritems(ui.paths)
            if name == search
        ]
    else:
        pathitems = sorted(pycompat.iteritems(ui.paths))

    fm = ui.formatter(b'paths', opts)
    # only mask passwords for human-readable (plain) output; structured
    # formats get the raw URL
    if fm.isplain():
        hidepassword = util.hidepassword
    else:
        hidepassword = bytes
    if ui.quiet:
        namefmt = b'%s\n'
    else:
        namefmt = b'%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, b'name', namefmt, name)
        fm.condwrite(not ui.quiet, b'url', b'%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in (b'name', b'url')
            if showsubopts:
                fm.plain(b'%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, b'%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_(b"not found!\n"))
        return 1
    else:
        return 0
@command(
    b'phase',
    [
        (b'p', b'public', False, _(b'set changeset phase to public')),
        (b'd', b'draft', False, _(b'set changeset phase to draft')),
        (b's', b'secret', False, _(b'set changeset phase to secret')),
        (b'f', b'force', False, _(b'allow to move boundary backward')),
        (b'r', b'rev', [], _(b'target revision'), _(b'REV')),
    ],
    _(b'[-p|-d|-s] [-f] [-r] [REV...]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changesets from a
    lower phase to a higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.cmdphasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_(b'only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts[b'rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write(b'%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        with repo.lock(), repo.transaction(b"phase") as tr:
            # set phase
            if not revs:
                raise error.Abort(_(b'empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts[b'force']:
                phases.retractboundary(repo, tr, targetphase, nodes)
        # compare the whole repo's phases before/after to report what
        # actually changed and what was rejected
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(
                _(
                    b'cannot move %i changesets to a higher '
                    b'phase, use --force\n'
                )
                % len(rejected)
            )
            ret = 1
        if changes:
            msg = _(b'phase changed for %i changesets\n') % changes
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_(b'no phases changed\n'))
    return ret
changesets to a higher ' b'phase, use --force\n' ) % len(rejected) ) ret = 1 if changes: msg = _(b'phase changed for %i changesets\n') % changes if ret: ui.status(msg) else: ui.note(msg) else: ui.warn(_(b'no phases changed\n')) return ret def postincoming(ui, repo, modheads, optupdate, checkout, brev): """Run after a changegroup has been added via pull/unbundle This takes arguments below: :modheads: change of heads by pull/unbundle :optupdate: updating working directory is needed or not :checkout: update destination revision (or None to default destination) :brev: a name, which might be a bookmark to be activated after updating """ if modheads == 0: return if optupdate: try: return hg.updatetotally(ui, repo, checkout, brev) except error.UpdateAbort as inst: msg = _(b"not updating: %s") % stringutil.forcebytestr(inst) hint = inst.hint raise error.UpdateAbort(msg, hint=hint) if modheads is not None and modheads > 1: currentbranchheads = len(repo.branchheads()) if currentbranchheads == modheads: ui.status( _(b"(run 'hg heads' to see heads, 'hg merge' to merge)\n") ) elif currentbranchheads > 1: ui.status( _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n") ) else: ui.status(_(b"(run 'hg heads' to see heads)\n")) elif not ui.configbool(b'commands', b'update.requiredest'): ui.status(_(b"(run 'hg update' to get a working copy)\n")) @command( b'pull', [ ( b'u', b'update', None, _(b'update to new branch head if new descendants were pulled'), ), ( b'f', b'force', None, _(b'run even when remote repository is unrelated'), ), (b'', b'confirm', None, _(b'confirm pull before applying changes'),), ( b'r', b'rev', [], _(b'a remote changeset intended to be added'), _(b'REV'), ), (b'B', b'bookmark', [], _(b"bookmark to pull"), _(b'BOOKMARK')), ( b'b', b'branch', [], _(b'a specific branch you would like to pull'), _(b'BRANCH'), ), ] + remoteopts, _(b'[-u] [-f] [-r REV]... 
[-e CMD] [--remotecmd CMD] [SOURCE]'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, helpbasic=True, ) def pull(ui, repo, source=b"default", **opts): """pull changes from the specified source Pull changes from a remote repository to a local one. This finds all changes from the repository at the specified path or URL and adds them to a local repository (the current one unless -R is specified). By default, this does not update the copy of the project in the working directory. When cloning from servers that support it, Mercurial may fetch pre-generated data. When this is done, hooks operating on incoming changesets and changegroups may fire more than once, once for each pre-generated bundle and as well as for any additional remaining data. See :hg:`help -e clonebundles` for more. Use :hg:`incoming` if you want to see what would have been added by a pull at the time you issued this command. If you then decide to add those changes to the repository, you should use :hg:`pull -r X` where ``X`` is the last changeset listed by :hg:`incoming`. If SOURCE is omitted, the 'default' path will be used. See :hg:`help urls` for more information. Specifying bookmark as ``.`` is equivalent to specifying the active bookmark's name. Returns 0 on success, 1 if an update had unresolved files. 
""" opts = pycompat.byteskwargs(opts) if ui.configbool(b'commands', b'update.requiredest') and opts.get( b'update' ): msg = _(b'update destination required by configuration') hint = _(b'use hg pull followed by hg update DEST') raise error.Abort(msg, hint=hint) source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch')) ui.status(_(b'pulling from %s\n') % util.hidepassword(source)) other = hg.peer(repo, opts, source) try: revs, checkout = hg.addbranchrevs( repo, other, branches, opts.get(b'rev') ) pullopargs = {} nodes = None if opts.get(b'bookmark') or revs: # The list of bookmark used here is the same used to actually update # the bookmark names, to avoid the race from issue 4689 and we do # all lookup and bookmark queries in one go so they see the same # version of the server state (issue 4700). nodes = [] fnodes = [] revs = revs or [] if revs and not other.capable(b'lookup'): err = _( b"other repository doesn't support revision lookup, " b"so a rev cannot be specified." 
) raise error.Abort(err) with other.commandexecutor() as e: fremotebookmarks = e.callcommand( b'listkeys', {b'namespace': b'bookmarks'} ) for r in revs: fnodes.append(e.callcommand(b'lookup', {b'key': r})) remotebookmarks = fremotebookmarks.result() remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks) pullopargs[b'remotebookmarks'] = remotebookmarks for b in opts.get(b'bookmark', []): b = repo._bookmarks.expandname(b) if b not in remotebookmarks: raise error.Abort(_(b'remote bookmark %s not found!') % b) nodes.append(remotebookmarks[b]) for i, rev in enumerate(revs): node = fnodes[i].result() nodes.append(node) if rev == checkout: checkout = node wlock = util.nullcontextmanager() if opts.get(b'update'): wlock = repo.wlock() with wlock: pullopargs.update(opts.get(b'opargs', {})) modheads = exchange.pull( repo, other, heads=nodes, force=opts.get(b'force'), bookmarks=opts.get(b'bookmark', ()), opargs=pullopargs, confirm=opts.get(b'confirm'), ).cgresult # brev is a name, which might be a bookmark to be activated at # the end of the update. In other words, it is an explicit # destination of the update brev = None if checkout: checkout = repo.unfiltered().changelog.rev(checkout) # order below depends on implementation of # hg.addbranchrevs(). opts['bookmark'] is ignored, # because 'checkout' is determined without it. 
if opts.get(b'rev'): brev = opts[b'rev'][0] elif opts.get(b'branch'): brev = opts[b'branch'][0] else: brev = branches[0] repo._subtoppath = source try: ret = postincoming( ui, repo, modheads, opts.get(b'update'), checkout, brev ) except error.FilteredRepoLookupError as exc: msg = _(b'cannot update to target: %s') % exc.args[0] exc.args = (msg,) + exc.args[1:] raise finally: del repo._subtoppath finally: other.close() return ret @command( b'push', [ (b'f', b'force', None, _(b'force push')), ( b'r', b'rev', [], _(b'a changeset intended to be included in the destination'), _(b'REV'), ), (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')), ( b'b', b'branch', [], _(b'a specific branch you would like to push'), _(b'BRANCH'), ), (b'', b'new-branch', False, _(b'allow pushing a new branch')), ( b'', b'pushvars', [], _(b'variables that can be sent to server (ADVANCED)'), ), ( b'', b'publish', False, _(b'push the changeset as public (EXPERIMENTAL)'), ), ] + remoteopts, _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, helpbasic=True, ) def push(ui, repo, dest=None, **opts): """push changes to the specified destination Push changesets from the local repository to the specified destination. This operation is symmetrical to pull: it is identical to a pull in the destination repository from the current one. By default, push will not allow creation of new heads at the destination, since multiple heads would make it unclear which head to use. In this situation, it is recommended to pull and merge before pushing. Use --new-branch if you want to allow push to create a new named branch that is not present at the destination. This allows you to only create a new branch without forcing other changes. .. note:: Extra care should be taken with the -f/--force option, which will push all new heads on all branches, an action which will almost always cause confusion for collaborators. 
    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    .. container:: verbose

        The --pushvars option sends strings to the server that become
        environment variables prepended with ``HG_USERVAR_``. For example,
        ``--pushvars ENABLE_FEATURE=true``, provides the server side hooks with
        ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.

        pushvars can provide for user-overridable hooks as well as set debug
        levels. One example is having a hook that blocks commits containing
        conflict markers, but enables the user to override the hook if the file
        is using conflict markers for testing purposes or the file format has
        strings that look like conflict markers.

        By default, servers will ignore `--pushvars`. To enable it add the
        following to your configuration file::

            [push]
            pushvars.server = true

    Returns 0 if push was successful, 1 if nothing to push.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'bookmark'):
        ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
        for b in opts[b'bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault(b'rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault(b'rev', []).append(b"null")

    path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
    if not path:
        raise error.Abort(
            _(b'default repository not configured!'),
            hint=_(b"see 'hg help config.paths'"),
        )
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get(b'branch') or [])
    ui.status(_(b'pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(
                _(b"specified revisions evaluate to an empty set"),
                hint=_(b"use different revision arguments"),
            )
    elif path.pushrev:
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(
                _(b'default push revset for path evaluates to an empty set')
            )
    elif ui.configbool(b'commands', b'push.require-revs'):
        raise error.Abort(
            _(b'no revisions specified to push'),
            hint=_(b'did you mean "hg push -r ."?'),
        )

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo[b'.']
        subs = c.substate  # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                # NOTE(review): a subrepo push result of 0 short-circuits the
                # whole push and returns `not 0` == True (exit code 1); this
                # reads inverted but matches the long-standing behavior here —
                # do not "fix" without checking subrepo push() semantics.
                return not result
    finally:
        del repo._subtoppath

    opargs = dict(opts.get(b'opargs', {}))  # copy opargs since we may mutate it
    opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
    pushop = exchange.push(
        repo,
        other,
        opts.get(b'force'),
        revs=revs,
        newbranch=opts.get(b'new_branch'),
        bookmarks=opts.get(b'bookmark', ()),
        publish=opts.get(b'publish'),
        opargs=opargs,
    )

    # cgresult truthy means changesets were pushed -> success (exit 0)
    result = not pushop.cgresult

    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result


@command(
    b'recover',
    [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def recover(ui, repo, **opts):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
""" ret = repo.recover() if ret: if opts['verify']: return hg.verify(repo) else: msg = _( b"(verify step skipped, run `hg verify` to check your " b"repository content)\n" ) ui.warn(msg) return 0 return 1 @command( b'remove|rm', [ (b'A', b'after', None, _(b'record delete for missing files')), (b'f', b'force', None, _(b'forget added files, delete modified files')), ] + subrepoopts + walkopts + dryrunopts, _(b'[OPTION]... FILE...'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, helpbasic=True, inferrepo=True, ) def remove(ui, repo, *pats, **opts): """remove the specified files on the next commit Schedule the indicated files for removal from the current branch. This command schedules the files to be removed at the next commit. To undo a remove before that, see :hg:`revert`. To undo added files, see :hg:`forget`. .. container:: verbose -A/--after can be used to remove only files that have already been deleted, -f/--force can be used to force deletion, and -Af can be used to remove files from the next revision without deleting them from the working directory. The following table details the behavior of remove for different file states (columns) and option combinations (rows). The file states are Added [A], Clean [C], Modified [M] and Missing [!] (as reported by :hg:`status`). The actions are Warn, Remove (from branch) and Delete (from disk): ========= == == == == opt/state A C M ! ========= == == == == none W RD W R -f R RD RD R -A W W W R -Af R R R R ========= == == == == .. note:: :hg:`remove` never deletes files in Added [A] state from the working directory, not even if ``--force`` is specified. Returns 0 on success, 1 if any warnings encountered. 
""" opts = pycompat.byteskwargs(opts) after, force = opts.get(b'after'), opts.get(b'force') dryrun = opts.get(b'dry_run') if not pats and not after: raise error.Abort(_(b'no files specified')) m = scmutil.match(repo[None], pats, opts) subrepos = opts.get(b'subrepos') uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) return cmdutil.remove( ui, repo, m, b"", uipathfn, after, force, subrepos, dryrun=dryrun ) @command( b'rename|move|mv', [ (b'A', b'after', None, _(b'record a rename that has already occurred')), ( b'', b'at-rev', b'', _(b'(un)mark renames in the given revision (EXPERIMENTAL)'), _(b'REV'), ), ( b'f', b'force', None, _(b'forcibly move over an existing managed file'), ), ] + walkopts + dryrunopts, _(b'[OPTION]... SOURCE... DEST'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, ) def rename(ui, repo, *pats, **opts): """rename files; equivalent of copy + remove Mark dest as copies of sources; mark sources for deletion. If dest is a directory, copies are put in that directory. If dest is a file, there can only be one source. By default, this command copies the contents of files as they exist in the working directory. If invoked with -A/--after, the operation is recorded, but no copying is performed. This command takes effect at the next commit. To undo a rename before that, see :hg:`revert`. Returns 0 on success, 1 if errors are encountered. """ opts = pycompat.byteskwargs(opts) with repo.wlock(): return cmdutil.copy(ui, repo, pats, opts, rename=True) @command( b'resolve', [ (b'a', b'all', None, _(b'select all unresolved files')), (b'l', b'list', None, _(b'list state of files needing merge')), (b'm', b'mark', None, _(b'mark files as resolved')), (b'u', b'unmark', None, _(b'mark files as unresolved')), (b'n', b'no-status', None, _(b'hide status prefix')), (b'', b're-merge', None, _(b're-merge files')), ] + mergetoolopts + walkopts + formatteropts, _(b'[OPTION]... 
[FILE]...'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, inferrepo=True, ) def resolve(ui, repo, *pats, **opts): """redo merges or set/view the merge status of files Merges with unresolved conflicts are often the result of non-interactive merging using the ``internal:merge`` configuration setting, or a command-line merge tool like ``diff3``. The resolve command is used to manage the files involved in a merge, after :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the working directory must have two parents). See :hg:`help merge-tools` for information on configuring merge tools. The resolve command can be used in the following ways: - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge the specified files, discarding any previous merge attempts. Re-merging is not performed for files already marked as resolved. Use ``--all/-a`` to select all unresolved files. ``--tool`` can be used to specify the merge tool used for the given files. It overrides the HGMERGE environment variable and your configuration files. Previous file contents are saved with a ``.orig`` suffix. - :hg:`resolve -m [FILE]`: mark a file as having been resolved (e.g. after having manually fixed-up the files). The default is to mark all unresolved files. - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The default is to mark all resolved files. - :hg:`resolve -l`: list files which had or still have conflicts. In the printed list, ``U`` = unresolved and ``R`` = resolved. You can use ``set:unresolved()`` or ``set:resolved()`` to filter the list. See :hg:`help filesets` for details. .. note:: Mercurial will not let you commit files with unresolved merge conflicts. You must use :hg:`resolve -m ...` before you can commit after a conflicting merge. .. container:: verbose Template: The following keywords are supported in addition to the common template keywords and functions. See also :hg:`help templates`. :mergestatus: String. 
        Character denoting merge conflicts, ``U`` or ``R``.
      :path: String. Repository-absolute path of the file.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """
    opts = pycompat.byteskwargs(opts)
    confirm = ui.configbool(b'commands', b'resolve.confirm')
    flaglist = b'all mark unmark list no_status re_merge'.split()
    # NOTE: 'all' intentionally shadows the builtin for the rest of this
    # function, matching the option name
    all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]

    # --list/--mark/--unmark/--re-merge are mutually exclusive actions
    actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
    if actioncount > 1:
        raise error.Abort(_(b"too many actions specified"))
    elif actioncount == 0 and ui.configbool(
        b'commands', b'resolve.explicit-re-merge'
    ):
        hint = _(b'use --mark, --unmark, --list or --re-merge')
        raise error.Abort(_(b'no action specified'), hint=hint)
    if pats and all:
        raise error.Abort(_(b"can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(
            _(b'no files or directories specified'),
            hint=b'use --all to re-merge all unresolved files',
        )

    if confirm:
        if all:
            if ui.promptchoice(
                _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
            ):
                raise error.Abort(_(b'user quit'))
        if mark and not pats:
            if ui.promptchoice(
                _(
                    b'mark all unresolved files as resolved (yn)?'
                    b'$$ &Yes $$ &No'
                )
            ):
                raise error.Abort(_(b'user quit'))
        if unmark and not pats:
            if ui.promptchoice(
                _(
                    b'mark all resolved files as unresolved (yn)?'
                    b'$$ &Yes $$ &No'
                )
            ):
                raise error.Abort(_(b'user quit'))

    uipathfn = scmutil.getuipathfn(repo)

    if show:
        # --list mode: read-only, formatted output, no wlock needed
        ui.pager(b'resolve')
        fm = ui.formatter(b'resolve', opts)
        ms = mergestatemod.mergestate.read(repo)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # Labels and keys based on merge state.  Unresolved path conflicts show
        # as 'P'.  Resolved path conflicts show as 'R', the same as normal
        # resolved conflicts.
        mergestateinfo = {
            mergestatemod.MERGE_RECORD_UNRESOLVED: (
                b'resolve.unresolved',
                b'U',
            ),
            mergestatemod.MERGE_RECORD_RESOLVED: (b'resolve.resolved', b'R'),
            mergestatemod.MERGE_RECORD_UNRESOLVED_PATH: (
                b'resolve.unresolved',
                b'P',
            ),
            mergestatemod.MERGE_RECORD_RESOLVED_PATH: (
                b'resolve.resolved',
                b'R',
            ),
        }

        for f in ms:
            if not m(f):
                continue

            label, key = mergestateinfo[ms[f]]
            fm.startitem()
            fm.context(ctx=wctx)
            fm.condwrite(not nostatus, b'mergestatus', b'%s ', key, label=label)
            fm.data(path=f)
            fm.plain(b'%s\n' % uipathfn(f), label=label)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergestatemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _(b'resolve command not applicable when not merging')
            )

        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False

        tocomplete = []
        hasconflictmarkers = []
        if mark:
            markcheck = ui.config(b'commands', b'resolve.mark-check')
            if markcheck not in [b'warn', b'abort']:
                # Treat all invalid / unrecognized values as 'none'.
                markcheck = False
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # path conflicts must be resolved manually
            if ms[f] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                if mark:
                    ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED_PATH)
                elif unmark:
                    ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED_PATH)
                elif ms[f] == mergestatemod.MERGE_RECORD_UNRESOLVED_PATH:
                    ui.warn(
                        _(b'%s: path conflict must be resolved manually\n')
                        % uipathfn(f)
                    )
                continue

            if mark:
                if markcheck:
                    # warn/abort if the file still contains conflict markers
                    # (resolve.mark-check config)
                    fdata = repo.wvfs.tryread(f)
                    if (
                        filemerge.hasconflictmarkers(fdata)
                        and ms[f] != mergestatemod.MERGE_RECORD_RESOLVED
                    ):
                        hasconflictmarkers.append(f)
                ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED)
            elif unmark:
                ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED)
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + b".resolve")
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                    with ui.configoverride(overrides, b'resolve'):
                        complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    # persist merge state even if preresolve raised
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(
                            a + b".resolve", scmutil.backuppath(ui, repo, f)
                        )
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        if hasconflictmarkers:
            ui.warn(
                _(
                    b'warning: the following files still have conflict '
                    b'markers:\n'
                )
                + b''.join(
                    b' ' + uipathfn(f) + b'\n' for f in hasconflictmarkers
                )
            )
            if markcheck == b'abort' and not all and not pats:
                raise error.Abort(
                    _(b'conflict markers detected'),
                    hint=_(b'use --all to mark anyway'),
                )

        for f in tocomplete:
            try:
                # resolve file
                overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                with ui.configoverride(overrides, b'resolve'):
                    r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + b".resolve", scmutil.backuppath(ui, repo, f))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        branchmerge = repo.dirstate.p2() != nullid
        mergestatemod.recordupdates(repo, ms.actions(), branchmerge, None)

        if not didwork and pats:
            # nothing matched: suggest a 'path:' prefixed retry if it would
            # have matched something
            hint = None
            if not any([p for p in pats if p.find(b':') >= 0]):
                pats = [b'path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue

                    def flag(o):
                        if o == b're_merge':
                            return b'--re-merge '
                        return b'-%s ' % o[0:1]

                    flags = b''.join([flag(o) for o in flaglist if opts.get(o)])
                    hint = _(b"(try: hg resolve %s%s)\n") % (
                        flags,
                        b' '.join(pats),
                    )
                    break
            ui.warn(_(b"arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)

        unresolvedf = list(ms.unresolved())
        if not unresolvedf:
            ui.status(_(b'(no more unresolved files)\n'))
            cmdutil.checkafterresolved(repo)

    return ret


@command(
    b'revert',
    [
        (b'a', b'all', None, _(b'revert all changes when no arguments given')),
        (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
        (b'r', b'rev', b'', _(b'revert to the specified revision'), _(b'REV')),
        (b'C', b'no-backup', None, _(b'do not save backup copies of files')),
        (b'i', b'interactive', None, _(b'interactively select the changes')),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... [-r REV] [NAME]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`merge --abort`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames.
    If the working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because revert does
    not change the working directory parents, this will cause these files to
    appear modified. This can be helpful to "back out" some or all of an
    earlier change. See :hg:`backout` for a related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get(b"date"):
        # --date is resolved to a concrete revision; mutually exclusive
        # with --rev
        cmdutil.check_incompatible_arguments(opts, b'date', [b'rev'])
        opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get(b'rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(
            _(b'uncommitted merge with no revision specified'),
            hint=_(b"use 'hg update' or see 'hg help revert'"),
        )

    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = scmutil.revsingle(repo, rev)

    if not (
        pats
        or opts.get(b'include')
        or opts.get(b'exclude')
        or opts.get(b'all')
        or opts.get(b'interactive')
    ):
        # no file selection at all: refuse, and craft the most helpful hint
        # for the user's current situation (merge / dirty / old rev)
        msg = _(b"no files or directories specified")
        if p2 != nullid:
            hint = _(
                b"uncommitted merge, use --all to discard all changes,"
                b" or 'hg update -C .' to abort the merge"
            )
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = (
                    _(
                        b"uncommitted changes, use --all to discard all"
                        b" changes, or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
            else:
                hint = (
                    _(
                        b"use --all to revert all files,"
                        b" or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
        elif dirty:
            hint = _(b"uncommitted changes, use --all to discard all changes")
        else:
            hint = _(b"use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, *pats, **pycompat.strkwargs(opts))


@command(
    b'rollback',
    dryrunopts + [(b'f', b'force', False, _(b'ignore safety measures'))],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    if not ui.configbool(b'ui', b'rollback'):
        raise error.Abort(
            _(b'rollback is disabled because it is unsafe'),
            hint=b'see `hg help -v rollback` for information',
        )
    # opts is the raw **kwargs dict here (no byteskwargs), hence str keys
    return repo.rollback(dryrun=opts.get('dry_run'), force=opts.get('force'))


@command(
    b'root',
    [] + formatteropts,
    intents={INTENT_READONLY},
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def root(ui, repo, **opts):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :hgpath: String. Path to the .hg directory.

      :storepath: String. Path to the directory holding versioned data.

    Returns 0 on success.
""" opts = pycompat.byteskwargs(opts) with ui.formatter(b'root', opts) as fm: fm.startitem() fm.write(b'reporoot', b'%s\n', repo.root) fm.data(hgpath=repo.path, storepath=repo.spath) @command( b'serve', [ ( b'A', b'accesslog', b'', _(b'name of access log file to write to'), _(b'FILE'), ), (b'd', b'daemon', None, _(b'run server in background')), (b'', b'daemon-postexec', [], _(b'used internally by daemon mode')), ( b'E', b'errorlog', b'', _(b'name of error log file to write to'), _(b'FILE'), ), # use string type, then we can check if something was passed ( b'p', b'port', b'', _(b'port to listen on (default: 8000)'), _(b'PORT'), ), ( b'a', b'address', b'', _(b'address to listen on (default: all interfaces)'), _(b'ADDR'), ), ( b'', b'prefix', b'', _(b'prefix path to serve from (default: server root)'), _(b'PREFIX'), ), ( b'n', b'name', b'', _(b'name to show in web pages (default: working directory)'), _(b'NAME'), ), ( b'', b'web-conf', b'', _(b"name of the hgweb config file (see 'hg help hgweb')"), _(b'FILE'), ), ( b'', b'webdir-conf', b'', _(b'name of the hgweb config file (DEPRECATED)'), _(b'FILE'), ), ( b'', b'pid-file', b'', _(b'name of file to write process ID to'), _(b'FILE'), ), (b'', b'stdio', None, _(b'for remote clients (ADVANCED)')), ( b'', b'cmdserver', b'', _(b'for remote clients (ADVANCED)'), _(b'MODE'), ), (b't', b'templates', b'', _(b'web templates to use'), _(b'TEMPLATE')), (b'', b'style', b'', _(b'template style to use'), _(b'STYLE')), (b'6', b'ipv6', None, _(b'use IPv6 in addition to IPv4')), (b'', b'certificate', b'', _(b'SSL certificate file'), _(b'FILE')), (b'', b'print-url', None, _(b'start and print only the URL')), ] + subrepoopts, _(b'[OPTION]...'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, helpbasic=True, optionalrepo=True, ) def serve(ui, repo, **opts): """start stand-alone webserver Start a local HTTP repository browser and pull server. You can use this for ad-hoc sharing and browsing of repositories. 
It is recommended to use a real web server to serve a repository for longer periods of time. Please note that the server does not implement access control. This means that, by default, anybody can read from the server and nobody can write to it by default. Set the ``web.allow-push`` option to ``*`` to allow everybody to push to the server. You should use a real web server if you need to authenticate users. By default, the server logs accesses to stdout and errors to stderr. Use the -A/--accesslog and -E/--errorlog options to log to files. To have the server choose a free port number to listen on, specify a port number of 0; in this case, the server will print the port number it uses. Returns 0 on success. """ cmdutil.check_incompatible_arguments(opts, 'stdio', ['cmdserver']) opts = pycompat.byteskwargs(opts) if opts[b"print_url"] and ui.verbose: raise error.Abort(_(b"cannot use --print-url with --verbose")) if opts[b"stdio"]: if repo is None: raise error.RepoError( _(b"there is no Mercurial repository here (.hg not found)") ) s = wireprotoserver.sshserver(ui, repo) s.serve_forever() service = server.createservice(ui, repo, opts) return server.runservice(opts, initfn=service.init, runfn=service.run) @command( b'shelve', [ ( b'A', b'addremove', None, _(b'mark new/missing files as added/removed before shelving'), ), (b'u', b'unknown', None, _(b'store unknown files in the shelve')), (b'', b'cleanup', None, _(b'delete all shelved changes')), ( b'', b'date', b'', _(b'shelve with the specified commit date'), _(b'DATE'), ), (b'd', b'delete', None, _(b'delete the named shelved change(s)')), (b'e', b'edit', False, _(b'invoke editor on commit messages')), ( b'k', b'keep', False, _(b'shelve, but keep changes in the working directory'), ), (b'l', b'list', None, _(b'list current shelves')), (b'm', b'message', b'', _(b'use text as shelve message'), _(b'TEXT')), ( b'n', b'name', b'', _(b'use the given name for the shelved commit'), _(b'NAME'), ), ( b'p', b'patch', None, _( 
b'output patches for changes (provide the names of the shelved ' b'changes as positional arguments)' ), ), (b'i', b'interactive', None, _(b'interactive mode')), ( b'', b'stat', None, _( b'output diffstat-style summary of changes (provide the names of ' b'the shelved changes as positional arguments)' ), ), ] + cmdutil.walkopts, _(b'hg shelve [OPTION]... [FILE]...'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, ) def shelve(ui, repo, *pats, **opts): '''save and set aside changes from the working directory Shelving takes files that "hg status" reports as not clean, saves the modifications to a bundle (a shelved change), and reverts the files so that their state in the working directory becomes clean. To restore these changes to the working directory, using "hg unshelve"; this will work even if you switch to a different commit. When no files are specified, "hg shelve" saves all not-clean files. If specific files or directories are named, only changes to those files are shelved. In bare shelve (when no files are specified, without interactive, include and exclude option), shelving remembers information if the working directory was on newly created branch, in other words working directory was on different branch than its first parent. In this situation unshelving restores branch information to the working directory. Each shelved change has a name that makes it easier to find later. The name of a shelved change defaults to being based on the active bookmark, or if there is no active bookmark, the current named branch. To specify a different name, use ``--name``. To see a list of existing shelved changes, use the ``--list`` option. For each shelved change, this will print its name, age, and description; use ``--patch`` or ``--stat`` for more details. To delete specific shelved changes, use ``--delete``. To delete all shelved changes, use ``--cleanup``. 
''' opts = pycompat.byteskwargs(opts) allowables = [ (b'addremove', {b'create'}), # 'create' is pseudo action (b'unknown', {b'create'}), (b'cleanup', {b'cleanup'}), # ('date', {'create'}), # ignored for passing '--date "0 0"' in tests (b'delete', {b'delete'}), (b'edit', {b'create'}), (b'keep', {b'create'}), (b'list', {b'list'}), (b'message', {b'create'}), (b'name', {b'create'}), (b'patch', {b'patch', b'list'}), (b'stat', {b'stat', b'list'}), ] def checkopt(opt): if opts.get(opt): for i, allowable in allowables: if opts[i] and opt not in allowable: raise error.Abort( _( b"options '--%s' and '--%s' may not be " b"used together" ) % (opt, i) ) return True if checkopt(b'cleanup'): if pats: raise error.Abort(_(b"cannot specify names when using '--cleanup'")) return shelvemod.cleanupcmd(ui, repo) elif checkopt(b'delete'): return shelvemod.deletecmd(ui, repo, pats) elif checkopt(b'list'): return shelvemod.listcmd(ui, repo, pats, opts) elif checkopt(b'patch') or checkopt(b'stat'): return shelvemod.patchcmds(ui, repo, pats, opts) else: return shelvemod.createcmd(ui, repo, pats, opts) _NOTTERSE = b'nothing' @command( b'status|st', [ (b'A', b'all', None, _(b'show status of all files')), (b'm', b'modified', None, _(b'show only modified files')), (b'a', b'added', None, _(b'show only added files')), (b'r', b'removed', None, _(b'show only removed files')), (b'd', b'deleted', None, _(b'show only missing files')), (b'c', b'clean', None, _(b'show only files without changes')), (b'u', b'unknown', None, _(b'show only unknown (not tracked) files')), (b'i', b'ignored', None, _(b'show only ignored files')), (b'n', b'no-status', None, _(b'hide status prefix')), (b't', b'terse', _NOTTERSE, _(b'show the terse output (EXPERIMENTAL)')), ( b'C', b'copies', None, _(b'show source of copied files (DEFAULT: ui.statuscopies)'), ), ( b'0', b'print0', None, _(b'end filenames with NUL, for use with xargs'), ), (b'', b'rev', [], _(b'show difference from revision'), _(b'REV')), ( b'', b'change', b'', 
_(b'list the changed files of a revision'), _(b'REV'), ), ] + walkopts + subrepoopts + formatteropts, _(b'[OPTION]... [FILE]...'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, helpbasic=True, inferrepo=True, intents={INTENT_READONLY}, ) def status(ui, repo, *pats, **opts): """show changed files in the working directory Show status of files in the repository. If names are given, only files that match are shown. Files that are clean or ignored or the source of a copy/move operation, are not listed unless -c/--clean, -i/--ignored, -C/--copies or -A/--all are given. Unless options described with "show only ..." are given, the options -mardu are used. Option -q/--quiet hides untracked (unknown and ignored) files unless explicitly requested with -u/--unknown or -i/--ignored. .. note:: :hg:`status` may appear to disagree with diff if permissions have changed or a merge has occurred. The standard diff format does not report permission changes and diff only reports changes relative to one merge parent. If one revision is given, it is used as the base revision. If two revisions are given, the differences between them are shown. The --change option can also be used as a shortcut to list the changed files of a revision from its first parent. The codes used to show the status of files are:: M = modified A = added R = removed C = clean ! = missing (deleted by non-hg command, but still tracked) ? = not tracked I = ignored = origin of the previous file (with --copies) .. container:: verbose The -t/--terse option abbreviates the output by showing only the directory name if all the files in it share the same status. The option takes an argument indicating the statuses to abbreviate: 'm' for 'modified', 'a' for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i' for 'ignored' and 'c' for clean. It abbreviates only those statuses which are passed. 
Note that clean and ignored files are not displayed with '--terse ic' unless the -c/--clean and -i/--ignored options are also used. The -v/--verbose option shows information when the repository is in an unfinished merge, shelve, rebase state etc. You can have this behavior turned on by default by enabling the ``commands.status.verbose`` option. You can skip displaying some of these states by setting ``commands.status.skipstates`` to one or more of: 'bisect', 'graft', 'histedit', 'merge', 'rebase', or 'unshelve'. Template: The following keywords are supported in addition to the common template keywords and functions. See also :hg:`help templates`. :path: String. Repository-absolute path of the file. :source: String. Repository-absolute path of the file originated from. Available if ``--copies`` is specified. :status: String. Character denoting file's status. Examples: - show changes in the working directory relative to a changeset:: hg status --rev 9353 - show changes in the working directory relative to the current directory (see :hg:`help patterns` for more information):: hg status re: - show all changes including copies in an existing changeset:: hg status --copies --change 9353 - get a NUL separated list of added files, suitable for xargs:: hg status -an0 - show more information about the repository status, abbreviating added, removed, modified, deleted, and untracked paths:: hg status -v -t mardu Returns 0 on success. 
""" cmdutil.check_at_most_one_arg(opts, 'rev', 'change') opts = pycompat.byteskwargs(opts) revs = opts.get(b'rev') change = opts.get(b'change') terse = opts.get(b'terse') if terse is _NOTTERSE: if revs: terse = b'' else: terse = ui.config(b'commands', b'status.terse') if revs and terse: msg = _(b'cannot use --terse with --rev') raise error.Abort(msg) elif change: repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn') ctx2 = scmutil.revsingle(repo, change, None) ctx1 = ctx2.p1() else: repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn') ctx1, ctx2 = scmutil.revpair(repo, revs) forcerelativevalue = None if ui.hasconfig(b'commands', b'status.relative'): forcerelativevalue = ui.configbool(b'commands', b'status.relative') uipathfn = scmutil.getuipathfn( repo, legacyrelativevalue=bool(pats), forcerelativevalue=forcerelativevalue, ) if opts.get(b'print0'): end = b'\0' else: end = b'\n' states = b'modified added removed deleted unknown ignored clean'.split() show = [k for k in states if opts.get(k)] if opts.get(b'all'): show += ui.quiet and (states[:4] + [b'clean']) or states if not show: if ui.quiet: show = states[:4] else: show = states[:5] m = scmutil.match(ctx2, pats, opts) if terse: # we need to compute clean and unknown to terse stat = repo.status( ctx1.node(), ctx2.node(), m, b'ignored' in show or b'i' in terse, clean=True, unknown=True, listsubrepos=opts.get(b'subrepos'), ) stat = cmdutil.tersedir(stat, terse) else: stat = repo.status( ctx1.node(), ctx2.node(), m, b'ignored' in show, b'clean' in show, b'unknown' in show, opts.get(b'subrepos'), ) changestates = zip( states, pycompat.iterbytestr(b'MAR!?IC'), [getattr(stat, s.decode('utf8')) for s in states], ) copy = {} if ( opts.get(b'all') or opts.get(b'copies') or ui.configbool(b'ui', b'statuscopies') ) and not opts.get(b'no_status'): copy = copies.pathcopies(ctx1, ctx2, m) morestatus = None if ( ui.verbose or ui.configbool(b'commands', b'status.verbose') ) and not ui.plain(): morestatus = 
cmdutil.readmorestatus(repo) ui.pager(b'status') fm = ui.formatter(b'status', opts) fmt = b'%s' + end showchar = not opts.get(b'no_status') for state, char, files in changestates: if state in show: label = b'status.' + state for f in files: fm.startitem() fm.context(ctx=ctx2) fm.data(itemtype=b'file', path=f) fm.condwrite(showchar, b'status', b'%s ', char, label=label) fm.plain(fmt % uipathfn(f), label=label) if f in copy: fm.data(source=copy[f]) fm.plain( (b' %s' + end) % uipathfn(copy[f]), label=b'status.copied', ) if morestatus: morestatus.formatfile(f, fm) if morestatus: morestatus.formatfooter(fm) fm.end() @command( b'summary|sum', [(b'', b'remote', None, _(b'check for push and pull'))], b'[--remote]', helpcategory=command.CATEGORY_WORKING_DIRECTORY, helpbasic=True, intents={INTENT_READONLY}, ) def summary(ui, repo, **opts): """summarize working directory state This generates a brief summary of the working directory state, including parents, branch, commit status, phase and available updates. With the --remote option, this will check the default paths for incoming and outgoing changes. This can be time-consuming. Returns 0 on success. 
""" opts = pycompat.byteskwargs(opts) ui.pager(b'summary') ctx = repo[None] parents = ctx.parents() pnode = parents[0].node() marks = [] try: ms = mergestatemod.mergestate.read(repo) except error.UnsupportedMergeRecords as e: s = b' '.join(e.recordtypes) ui.warn( _(b'warning: merge state has unsupported record types: %s\n') % s ) unresolved = [] else: unresolved = list(ms.unresolved()) for p in parents: # label with log.changeset (instead of log.parent) since this # shows a working directory parent *changeset*: # i18n: column positioning for "hg summary" ui.write( _(b'parent: %d:%s ') % (p.rev(), p), label=logcmdutil.changesetlabels(p), ) ui.write(b' '.join(p.tags()), label=b'log.tag') if p.bookmarks(): marks.extend(p.bookmarks()) if p.rev() == -1: if not len(repo): ui.write(_(b' (empty repository)')) else: ui.write(_(b' (no revision checked out)')) if p.obsolete(): ui.write(_(b' (obsolete)')) if p.isunstable(): instabilities = ( ui.label(instability, b'trouble.%s' % instability) for instability in p.instabilities() ) ui.write(b' (' + b', '.join(instabilities) + b')') ui.write(b'\n') if p.description(): ui.status( b' ' + p.description().splitlines()[0].strip() + b'\n', label=b'log.summary', ) branch = ctx.branch() bheads = repo.branchheads(branch) # i18n: column positioning for "hg summary" m = _(b'branch: %s\n') % branch if branch != b'default': ui.write(m, label=b'log.branch') else: ui.status(m, label=b'log.branch') if marks: active = repo._activebookmark # i18n: column positioning for "hg summary" ui.write(_(b'bookmarks:'), label=b'log.bookmark') if active is not None: if active in marks: ui.write(b' *' + active, label=bookmarks.activebookmarklabel) marks.remove(active) else: ui.write(b' [%s]' % active, label=bookmarks.activebookmarklabel) for m in marks: ui.write(b' ' + m, label=b'log.bookmark') ui.write(b'\n', label=b'log.bookmark') status = repo.status(unknown=True) c = repo.dirstate.copies() copied, renamed = [], [] for d, s in pycompat.iteritems(c): if s in 
status.removed: status.removed.remove(s) renamed.append(d) else: copied.append(d) if d in status.added: status.added.remove(d) subs = [s for s in ctx.substate if ctx.sub(s).dirty()] labels = [ (ui.label(_(b'%d modified'), b'status.modified'), status.modified), (ui.label(_(b'%d added'), b'status.added'), status.added), (ui.label(_(b'%d removed'), b'status.removed'), status.removed), (ui.label(_(b'%d renamed'), b'status.copied'), renamed), (ui.label(_(b'%d copied'), b'status.copied'), copied), (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted), (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown), (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved), (ui.label(_(b'%d subrepos'), b'status.modified'), subs), ] t = [] for l, s in labels: if s: t.append(l % len(s)) t = b', '.join(t) cleanworkdir = False if repo.vfs.exists(b'graftstate'): t += _(b' (graft in progress)') if repo.vfs.exists(b'updatestate'): t += _(b' (interrupted update)') elif len(parents) > 1: t += _(b' (merge)') elif branch != parents[0].branch(): t += _(b' (new branch)') elif parents[0].closesbranch() and pnode in repo.branchheads( branch, closed=True ): t += _(b' (head closed)') elif not ( status.modified or status.added or status.removed or renamed or copied or subs ): t += _(b' (clean)') cleanworkdir = True elif pnode not in bheads: t += _(b' (new branch head)') if parents: pendingphase = max(p.phase() for p in parents) else: pendingphase = phases.public if pendingphase > phases.newcommitphase(ui): t += b' (%s)' % phases.phasenames[pendingphase] if cleanworkdir: # i18n: column positioning for "hg summary" ui.status(_(b'commit: %s\n') % t.strip()) else: # i18n: column positioning for "hg summary" ui.write(_(b'commit: %s\n') % t.strip()) # all ancestors of branch heads - all ancestors of parent = new csets new = len( repo.changelog.findmissing([pctx.node() for pctx in parents], bheads) ) if new == 0: # i18n: column positioning for "hg summary" ui.status(_(b'update: 
(current)\n')) elif pnode not in bheads: # i18n: column positioning for "hg summary" ui.write(_(b'update: %d new changesets (update)\n') % new) else: # i18n: column positioning for "hg summary" ui.write( _(b'update: %d new changesets, %d branch heads (merge)\n') % (new, len(bheads)) ) t = [] draft = len(repo.revs(b'draft()')) if draft: t.append(_(b'%d draft') % draft) secret = len(repo.revs(b'secret()')) if secret: t.append(_(b'%d secret') % secret) if draft or secret: ui.status(_(b'phases: %s\n') % b', '.join(t)) if obsolete.isenabled(repo, obsolete.createmarkersopt): for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"): numtrouble = len(repo.revs(trouble + b"()")) # We write all the possibilities to ease translation troublemsg = { b"orphan": _(b"orphan: %d changesets"), b"contentdivergent": _(b"content-divergent: %d changesets"), b"phasedivergent": _(b"phase-divergent: %d changesets"), } if numtrouble > 0: ui.status(troublemsg[trouble] % numtrouble + b"\n") cmdutil.summaryhooks(ui, repo) if opts.get(b'remote'): needsincoming, needsoutgoing = True, True else: needsincoming, needsoutgoing = False, False for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None): if i: needsincoming = True if o: needsoutgoing = True if not needsincoming and not needsoutgoing: return def getincoming(): source, branches = hg.parseurl(ui.expandpath(b'default')) sbranch = branches[0] try: other = hg.peer(repo, {}, source) except error.RepoError: if opts.get(b'remote'): raise return source, sbranch, None, None, None revs, checkout = hg.addbranchrevs(repo, other, branches, None) if revs: revs = [other.lookup(rev) for rev in revs] ui.debug(b'comparing with %s\n' % util.hidepassword(source)) repo.ui.pushbuffer() commoninc = discovery.findcommonincoming(repo, other, heads=revs) repo.ui.popbuffer() return source, sbranch, other, commoninc, commoninc[1] if needsincoming: source, sbranch, sother, commoninc, incoming = getincoming() else: source = sbranch = sother = commoninc = 
incoming = None def getoutgoing(): dest, branches = hg.parseurl(ui.expandpath(b'default-push', b'default')) dbranch = branches[0] revs, checkout = hg.addbranchrevs(repo, repo, branches, None) if source != dest: try: dother = hg.peer(repo, {}, dest) except error.RepoError: if opts.get(b'remote'): raise return dest, dbranch, None, None ui.debug(b'comparing with %s\n' % util.hidepassword(dest)) elif sother is None: # there is no explicit destination peer, but source one is invalid return dest, dbranch, None, None else: dother = sother if source != dest or (sbranch is not None and sbranch != dbranch): common = None else: common = commoninc if revs: revs = [repo.lookup(rev) for rev in revs] repo.ui.pushbuffer() outgoing = discovery.findcommonoutgoing( repo, dother, onlyheads=revs, commoninc=common ) repo.ui.popbuffer() return dest, dbranch, dother, outgoing if needsoutgoing: dest, dbranch, dother, outgoing = getoutgoing() else: dest = dbranch = dother = outgoing = None if opts.get(b'remote'): t = [] if incoming: t.append(_(b'1 or more incoming')) o = outgoing.missing if o: t.append(_(b'%d outgoing') % len(o)) other = dother or sother if b'bookmarks' in other.listkeys(b'namespaces'): counts = bookmarks.summary(repo, other) if counts[0] > 0: t.append(_(b'%d incoming bookmarks') % counts[0]) if counts[1] > 0: t.append(_(b'%d outgoing bookmarks') % counts[1]) if t: # i18n: column positioning for "hg summary" ui.write(_(b'remote: %s\n') % (b', '.join(t))) else: # i18n: column positioning for "hg summary" ui.status(_(b'remote: (synced)\n')) cmdutil.summaryremotehooks( ui, repo, opts, ( (source, sbranch, sother, commoninc), (dest, dbranch, dother, outgoing), ), ) @command( b'tag', [ (b'f', b'force', None, _(b'force tag')), (b'l', b'local', None, _(b'make the tag local')), (b'r', b'rev', b'', _(b'revision to tag'), _(b'REV')), (b'', b'remove', None, _(b'remove a tag')), # -l/--local is already there, commitopts cannot be used (b'e', b'edit', None, _(b'invoke editor on commit 
messages')), (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')), ] + commitopts2, _(b'[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'), helpcategory=command.CATEGORY_CHANGE_ORGANIZATION, ) def tag(ui, repo, name1, *names, **opts): """add one or more tags for the current or given revision Name a particular revision using . Tags are used to name particular revisions of the repository and are very useful to compare different revisions, to go back to significant earlier versions or to mark branch points as releases, etc. Changing an existing tag is normally disallowed; use -f/--force to override. If no revision is given, the parent of the working directory is used. To facilitate version control, distribution, and merging of tags, they are stored as a file named ".hgtags" which is managed similarly to other project files and can be hand-edited if necessary. This also means that tagging creates a new commit. The file ".hg/localtags" is used for local tags (not shared among repositories). Tag commits are usually made at the head of a branch. If the parent of the working directory is not a branch head, :hg:`tag` aborts; use -f/--force to force the tag commit to be based on a non-head changeset. See :hg:`help dates` for a list of formats valid for -d/--date. Since tag names have priority over branch names during revision lookup, using an existing branch name as a tag name is discouraged. Returns 0 on success. """ cmdutil.check_incompatible_arguments(opts, 'remove', ['rev']) opts = pycompat.byteskwargs(opts) with repo.wlock(), repo.lock(): rev_ = b"." 
names = [t.strip() for t in (name1,) + names] if len(names) != len(set(names)): raise error.Abort(_(b'tag names must be unique')) for n in names: scmutil.checknewlabel(repo, n, b'tag') if not n: raise error.Abort( _(b'tag names cannot consist entirely of whitespace') ) if opts.get(b'rev'): rev_ = opts[b'rev'] message = opts.get(b'message') if opts.get(b'remove'): if opts.get(b'local'): expectedtype = b'local' else: expectedtype = b'global' for n in names: if repo.tagtype(n) == b'global': alltags = tagsmod.findglobaltags(ui, repo) if alltags[n][0] == nullid: raise error.Abort(_(b"tag '%s' is already removed") % n) if not repo.tagtype(n): raise error.Abort(_(b"tag '%s' does not exist") % n) if repo.tagtype(n) != expectedtype: if expectedtype == b'global': raise error.Abort( _(b"tag '%s' is not a global tag") % n ) else: raise error.Abort(_(b"tag '%s' is not a local tag") % n) rev_ = b'null' if not message: # we don't translate commit messages message = b'Removed tag %s' % b', '.join(names) elif not opts.get(b'force'): for n in names: if n in repo.tags(): raise error.Abort( _(b"tag '%s' already exists (use -f to force)") % n ) if not opts.get(b'local'): p1, p2 = repo.dirstate.parents() if p2 != nullid: raise error.Abort(_(b'uncommitted merge')) bheads = repo.branchheads() if not opts.get(b'force') and bheads and p1 not in bheads: raise error.Abort( _( b'working directory is not at a branch head ' b'(use -f to force)' ) ) node = scmutil.revsingle(repo, rev_).node() if not message: # we don't translate commit messages message = b'Added tag %s for changeset %s' % ( b', '.join(names), short(node), ) date = opts.get(b'date') if date: date = dateutil.parsedate(date) if opts.get(b'remove'): editform = b'tag.remove' else: editform = b'tag.add' editor = cmdutil.getcommiteditor( editform=editform, **pycompat.strkwargs(opts) ) # don't allow tagging the null rev if ( not opts.get(b'remove') and scmutil.revsingle(repo, rev_).rev() == nullrev ): raise error.Abort(_(b"cannot tag 
null revision")) tagsmod.tag( repo, names, node, message, opts.get(b'local'), opts.get(b'user'), date, editor=editor, ) @command( b'tags', formatteropts, b'', helpcategory=command.CATEGORY_CHANGE_ORGANIZATION, intents={INTENT_READONLY}, ) def tags(ui, repo, **opts): """list repository tags This lists both regular and local tags. When the -v/--verbose switch is used, a third column "local" is printed for local tags. When the -q/--quiet switch is used, only the tag name is printed. .. container:: verbose Template: The following keywords are supported in addition to the common template keywords and functions such as ``{tag}``. See also :hg:`help templates`. :type: String. ``local`` for local tags. Returns 0 on success. """ opts = pycompat.byteskwargs(opts) ui.pager(b'tags') fm = ui.formatter(b'tags', opts) hexfunc = fm.hexfunc for t, n in reversed(repo.tagslist()): hn = hexfunc(n) label = b'tags.normal' tagtype = b'' if repo.tagtype(t) == b'local': label = b'tags.local' tagtype = b'local' fm.startitem() fm.context(repo=repo) fm.write(b'tag', b'%s', t, label=label) fmt = b" " * (30 - encoding.colwidth(t)) + b' %5d:%s' fm.condwrite( not ui.quiet, b'rev node', fmt, repo.changelog.rev(n), hn, label=label, ) fm.condwrite( ui.verbose and tagtype, b'type', b' %s', tagtype, label=label ) fm.plain(b'\n') fm.end() @command( b'tip', [ (b'p', b'patch', None, _(b'show patch')), (b'g', b'git', None, _(b'use git extended diff format')), ] + templateopts, _(b'[-p] [-g]'), helpcategory=command.CATEGORY_CHANGE_NAVIGATION, ) def tip(ui, repo, **opts): """show the tip revision (DEPRECATED) The tip revision (usually just called the tip) is the changeset most recently added to the repository (and therefore the most recently changed head). If you have just made a commit, that commit will be the tip. If you have just pulled changes from another repository, the tip of that repository becomes the current tip. The "tip" tag is special and cannot be renamed or assigned to a different changeset. 
This command is deprecated, please use :hg:`heads` instead. Returns 0 on success. """ opts = pycompat.byteskwargs(opts) displayer = logcmdutil.changesetdisplayer(ui, repo, opts) displayer.show(repo[b'tip']) displayer.close() @command( b'unbundle', [ ( b'u', b'update', None, _(b'update to new branch head if changesets were unbundled'), ) ], _(b'[-u] FILE...'), helpcategory=command.CATEGORY_IMPORT_EXPORT, ) def unbundle(ui, repo, fname1, *fnames, **opts): """apply one or more bundle files Apply one or more bundle files generated by :hg:`bundle`. Returns 0 on success, 1 if an update has unresolved files. """ fnames = (fname1,) + fnames with repo.lock(): for fname in fnames: f = hg.openpath(ui, fname) gen = exchange.readbundle(ui, f, fname) if isinstance(gen, streamclone.streamcloneapplier): raise error.Abort( _( b'packed bundles cannot be applied with ' b'"hg unbundle"' ), hint=_(b'use "hg debugapplystreamclonebundle"'), ) url = b'bundle:' + fname try: txnname = b'unbundle' if not isinstance(gen, bundle2.unbundle20): txnname = b'unbundle\n%s' % util.hidepassword(url) with repo.transaction(txnname) as tr: op = bundle2.applybundle( repo, gen, tr, source=b'unbundle', url=url ) except error.BundleUnknownFeatureError as exc: raise error.Abort( _(b'%s: unknown bundle feature, %s') % (fname, exc), hint=_( b"see https://mercurial-scm.org/" b"wiki/BundleFeature for more " b"information" ), ) modheads = bundle2.combinechangegroupresults(op) return postincoming(ui, repo, modheads, opts.get('update'), None, None) @command( b'unshelve', [ (b'a', b'abort', None, _(b'abort an incomplete unshelve operation')), ( b'c', b'continue', None, _(b'continue an incomplete unshelve operation'), ), (b'i', b'interactive', None, _(b'use interactive mode (EXPERIMENTAL)')), (b'k', b'keep', None, _(b'keep shelve after unshelving')), ( b'n', b'name', b'', _(b'restore shelved change with given name'), _(b'NAME'), ), (b't', b'tool', b'', _(b'specify merge tool')), ( b'', b'date', b'', _(b'set date for 
temporary commits (DEPRECATED)'), _(b'DATE'), ), ], _(b'hg unshelve [OPTION]... [[-n] SHELVED]'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, ) def unshelve(ui, repo, *shelved, **opts): """restore a shelved change to the working directory This command accepts an optional name of a shelved change to restore. If none is given, the most recent shelved change is used. If a shelved change is applied successfully, the bundle that contains the shelved changes is moved to a backup location (.hg/shelve-backup). Since you can restore a shelved change on top of an arbitrary commit, it is possible that unshelving will result in a conflict between your changes and the commits you are unshelving onto. If this occurs, you must resolve the conflict, then use ``--continue`` to complete the unshelve operation. (The bundle will not be moved until you successfully complete the unshelve.) (Alternatively, you can use ``--abort`` to abandon an unshelve that causes a conflict. This reverts the unshelved changes, and leaves the bundle in place.) If bare shelved change (without interactive, include and exclude option) was done on newly created branch it would restore branch information to the working directory. After a successful unshelve, the shelved changes are stored in a backup directory. Only the N most recent backups are kept. N defaults to 10 but can be overridden using the ``shelve.maxbackups`` configuration option. .. container:: verbose Timestamp in seconds is used to decide order of backups. More than ``maxbackups`` backups are kept, if same timestamp prevents from deciding exact order of them, for safety. Selected changes can be unshelved with ``--interactive`` flag. The working directory is updated with the selected changes, and only the unselected changes remain shelved. Note: The whole shelve is applied to working directory first before running interactively. 
So, this will bring up all the conflicts between working directory and the shelve, irrespective of which changes will be unshelved. """ with repo.wlock(): return shelvemod.unshelvecmd(ui, repo, *shelved, **opts) statemod.addunfinished( b'unshelve', fname=b'shelvedstate', continueflag=True, abortfunc=shelvemod.hgabortunshelve, continuefunc=shelvemod.hgcontinueunshelve, cmdmsg=_(b'unshelve already in progress'), ) @command( b'update|up|checkout|co', [ (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')), (b'c', b'check', None, _(b'require clean working directory')), (b'm', b'merge', None, _(b'merge uncommitted changes')), (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')), (b'r', b'rev', b'', _(b'revision'), _(b'REV')), ] + mergetoolopts, _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'), helpcategory=command.CATEGORY_WORKING_DIRECTORY, helpbasic=True, ) def update(ui, repo, node=None, **opts): """update working directory (or switch revisions) Update the repository's working directory to the specified changeset. If no changeset is specified, update to the tip of the current named branch and move the active bookmark (see :hg:`help bookmarks`). Update sets the working directory's parent revision to the specified changeset (see :hg:`help parents`). If the changeset is not a descendant or ancestor of the working directory's parent and there are uncommitted changes, the update is aborted. With the -c/--check option, the working directory is checked for uncommitted changes; if none are found, the working directory is updated to the specified changeset. .. container:: verbose The -C/--clean, -c/--check, and -m/--merge options control what happens if the working directory contains uncommitted changes. At most of one of them can be specified. 1. 
If no option is specified, and if the requested changeset is an ancestor or descendant of the working directory's parent, the uncommitted changes are merged into the requested changeset and the merged result is left uncommitted. If the requested changeset is not an ancestor or descendant (that is, it is on another branch), the update is aborted and the uncommitted changes are preserved. 2. With the -m/--merge option, the update is allowed even if the requested changeset is not an ancestor or descendant of the working directory's parent. 3. With the -c/--check option, the update is aborted and the uncommitted changes are preserved. 4. With the -C/--clean option, uncommitted changes are discarded and the working directory is updated to the requested changeset. To cancel an uncommitted merge (and lose your changes), use :hg:`merge --abort`. Use null as the changeset to remove the working directory (like :hg:`clone -U`). If you want to revert just one file to an older revision, use :hg:`revert [-r REV] NAME`. See :hg:`help dates` for a list of formats valid for -d/--date. Returns 0 on success, 1 if there are unresolved files. 
""" cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge') rev = opts.get('rev') date = opts.get('date') clean = opts.get('clean') check = opts.get('check') merge = opts.get('merge') if rev and node: raise error.Abort(_(b"please specify just one revision")) if ui.configbool(b'commands', b'update.requiredest'): if not node and not rev and not date: raise error.Abort( _(b'you must specify a destination'), hint=_(b'for example: hg update ".::"'), ) if rev is None or rev == b'': rev = node if date and rev is not None: raise error.Abort(_(b"you can't specify a revision and a date")) updatecheck = None if check: updatecheck = b'abort' elif merge: updatecheck = b'none' with repo.wlock(): cmdutil.clearunfinished(repo) if date: rev = cmdutil.finddate(ui, repo, date) # if we defined a bookmark, we have to remember the original name brev = rev if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') ctx = scmutil.revsingle(repo, rev, default=None) rev = ctx.rev() hidden = ctx.hidden() overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')} with ui.configoverride(overrides, b'update'): ret = hg.updatetotally( ui, repo, rev, brev, clean=clean, updatecheck=updatecheck ) if hidden: ctxstr = ctx.hex()[:12] ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr) if ctx.obsolete(): obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx) ui.warn(b"(%s)\n" % obsfatemsg) return ret @command( b'verify', [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')], helpcategory=command.CATEGORY_MAINTENANCE, ) def verify(ui, repo, **opts): """verify the integrity of the repository Verify the integrity of the current repository. This will perform an extensive check of the repository's integrity, validating the hashes and checksums of each entry in the changelog, manifest, and tracked files, as well as the integrity of their crosslinks and indices. 
Please see https://mercurial-scm.org/wiki/RepositoryCorruption for more information about recovery from corruption of the repository. Returns 0 on success, 1 if errors are encountered. """ opts = pycompat.byteskwargs(opts) level = None if opts[b'full']: level = verifymod.VERIFY_FULL return hg.verify(repo, level) @command( b'version', [] + formatteropts, helpcategory=command.CATEGORY_HELP, norepo=True, intents={INTENT_READONLY}, ) def version_(ui, **opts): """output version and copyright information .. container:: verbose Template: The following keywords are supported. See also :hg:`help templates`. :extensions: List of extensions. :ver: String. Version number. And each entry of ``{extensions}`` provides the following sub-keywords in addition to ``{ver}``. :bundled: Boolean. True if included in the release. :name: String. Extension name. """ opts = pycompat.byteskwargs(opts) if ui.verbose: ui.pager(b'version') fm = ui.formatter(b"version", opts) fm.startitem() fm.write( b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version() ) license = _( b"(see https://mercurial-scm.org for more information)\n" b"\nCopyright (C) 2005-2020 Matt Mackall and others\n" b"This is free software; see the source for copying conditions. 
" b"There is NO\nwarranty; " b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" ) if not ui.quiet: fm.plain(license) if ui.verbose: fm.plain(_(b"\nEnabled extensions:\n\n")) # format names and versions into columns names = [] vers = [] isinternals = [] for name, module in sorted(extensions.extensions()): names.append(name) vers.append(extensions.moduleversion(module) or None) isinternals.append(extensions.ismoduleinternal(module)) fn = fm.nested(b"extensions", tmpl=b'{name}\n') if names: namefmt = b" %%-%ds " % max(len(n) for n in names) places = [_(b"external"), _(b"internal")] for n, v, p in zip(names, vers, isinternals): fn.startitem() fn.condwrite(ui.verbose, b"name", namefmt, n) if ui.verbose: fn.plain(b"%s " % places[p]) fn.data(bundled=p) fn.condwrite(ui.verbose and v, b"ver", b"%s", v) if ui.verbose: fn.plain(b"\n") fn.end() fm.end() def loadcmdtable(ui, name, cmdtable): """Load command functions from specified cmdtable """ overrides = [cmd for cmd in cmdtable if cmd in table] if overrides: ui.warn( _(b"extension '%s' overrides commands: %s\n") % (name, b" ".join(overrides)) ) table.update(cmdtable) diff --git a/mercurial/dispatch.py b/mercurial/dispatch.py --- a/mercurial/dispatch.py +++ b/mercurial/dispatch.py @@ -1,1363 +1,1365 @@ # dispatch.py - command dispatching for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import, print_function import difflib import errno import getopt import io import os import pdb import re import signal import sys import traceback from .i18n import _ from .pycompat import getattr from hgdemandimport import tracing from . 
class request(object):
    """A single hg invocation: arguments, ui/repo handles and io streams."""

    def __init__(
        self,
        args,
        ui=None,
        repo=None,
        fin=None,
        fout=None,
        ferr=None,
        fmsg=None,
        prereposetups=None,
    ):
        self.args = args
        self.ui = ui
        self.repo = repo

        # input/output/error streams
        self.fin = fin
        self.fout = fout
        self.ferr = ferr
        # separate stream for status/error messages
        self.fmsg = fmsg

        # remember options pre-parsed by _earlyparseopts()
        self.earlyoptions = {}

        # reposetups which run before extensions, useful for chg to pre-fill
        # low-level repo state (for example, changelog) before extensions.
        self.prereposetups = prereposetups or []

        # store the parsed and canonical command
        self.canonical_command = None

    def _runexithandlers(self):
        # Run every registered exit handler; remember the first exception but
        # keep draining the handler list so later handlers still execute.
        exc = None
        handlers = self.ui._exithandlers
        try:
            while handlers:
                func, args, kwargs = handlers.pop()
                try:
                    func(*args, **kwargs)
                except:  # re-raises below
                    if exc is None:
                        exc = sys.exc_info()[1]
                    self.ui.warnnoi18n(b'error in exit handlers:\n')
                    self.ui.traceback(force=True)
        finally:
            if exc is not None:
                raise exc


def run():
    """run the command in sys.argv"""
    try:
        initstdio()
        with tracing.log('parse args into request'):
            req = request(pycompat.sysargv[1:])
        err = None
        try:
            status = dispatch(req)
        except error.StdioError as e:
            err = e
            status = -1

        # In all cases we try to flush stdio streams.
        if util.safehasattr(req.ui, b'fout'):
            assert req.ui is not None  # help pytype
            assert req.ui.fout is not None  # help pytype
            try:
                req.ui.fout.flush()
            except IOError as e:
                err = e
                status = -1

        if util.safehasattr(req.ui, b'ferr'):
            assert req.ui is not None  # help pytype
            assert req.ui.ferr is not None  # help pytype
            try:
                if err is not None and err.errno != errno.EPIPE:
                    req.ui.ferr.write(
                        b'abort: %s\n' % encoding.strtolocal(err.strerror)
                    )
                req.ui.ferr.flush()
            # There's not much we can do about an I/O error here. So (possibly)
            # change the status code and move on.
            except IOError:
                status = -1

        _silencestdio()
    except KeyboardInterrupt:
        # Catch early/late KeyboardInterrupt as last ditch. Here nothing will
        # be printed to console to avoid another IOError/KeyboardInterrupt.
        status = -1
    # exit codes are a single unsigned byte on POSIX
    sys.exit(status & 255)
kwargs = { "newline": "\n", "line_buffering": sys.stdout.line_buffering, } if util.safehasattr(sys.stdout, "write_through"): kwargs["write_through"] = sys.stdout.write_through sys.stdout = io.TextIOWrapper( sys.stdout.buffer, sys.stdout.encoding, sys.stdout.errors, **kwargs ) kwargs = { "newline": "\n", "line_buffering": sys.stderr.line_buffering, } if util.safehasattr(sys.stderr, "write_through"): kwargs["write_through"] = sys.stderr.write_through sys.stderr = io.TextIOWrapper( sys.stderr.buffer, sys.stderr.encoding, sys.stderr.errors, **kwargs ) # No write_through on read-only stream. sys.stdin = io.TextIOWrapper( sys.stdin.buffer, sys.stdin.encoding, sys.stdin.errors, # None is universal newlines mode. newline=None, line_buffering=sys.stdin.line_buffering, ) def _silencestdio(): for fp in (sys.stdout, sys.stderr): # Check if the file is okay try: fp.flush() continue except IOError: pass # Otherwise mark it as closed to silence "Exception ignored in" # message emitted by the interpreter finalizer. Be careful to # not close procutil.stdout, which may be a fdopen-ed file object # and its close() actually closes the underlying file descriptor. try: fp.close() except IOError: pass else: def initstdio(): for fp in (sys.stdin, sys.stdout, sys.stderr): procutil.setbinary(fp) def _silencestdio(): pass def _getsimilar(symbols, value): sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio() # The cutoff for similarity here is pretty arbitrary. It should # probably be investigated and tweaked. 
return [s for s in symbols if sim(s) > 0.6] def _reportsimilar(write, similar): if len(similar) == 1: write(_(b"(did you mean %s?)\n") % similar[0]) elif similar: ss = b", ".join(sorted(similar)) write(_(b"(did you mean one of %s?)\n") % ss) def _formatparse(write, inst): similar = [] if isinstance(inst, error.UnknownIdentifier): # make sure to check fileset first, as revset can invoke fileset similar = _getsimilar(inst.symbols, inst.function) if inst.location is not None: write( _(b"hg: parse error at %s: %s\n") % (pycompat.bytestr(inst.location), inst.message) ) else: write(_(b"hg: parse error: %s\n") % inst.message) _reportsimilar(write, similar) if inst.hint: write(_(b"(%s)\n") % inst.hint) def _formatargs(args): return b' '.join(procutil.shellquote(a) for a in args) def dispatch(req): """run the command specified in req.args; returns an integer status code""" with tracing.log('dispatch.dispatch'): if req.ferr: ferr = req.ferr elif req.ui: ferr = req.ui.ferr else: ferr = procutil.stderr try: if not req.ui: req.ui = uimod.ui.load() req.earlyoptions.update(_earlyparseopts(req.ui, req.args)) if req.earlyoptions[b'traceback']: req.ui.setconfig(b'ui', b'traceback', b'on', b'--traceback') # set ui streams from the request if req.fin: req.ui.fin = req.fin if req.fout: req.ui.fout = req.fout if req.ferr: req.ui.ferr = req.ferr if req.fmsg: req.ui.fmsg = req.fmsg except error.Abort as inst: ferr.write(_(b"abort: %s\n") % inst.message) if inst.hint: ferr.write(_(b"(%s)\n") % inst.hint) return -1 except error.ParseError as inst: _formatparse(ferr.write, inst) return -1 msg = _formatargs(req.args) starttime = util.timer() ret = 1 # default of Python exit code on unhandled exception try: ret = _runcatch(req) or 0 except error.ProgrammingError as inst: req.ui.error(_(b'** ProgrammingError: %s\n') % inst) if inst.hint: req.ui.error(_(b'** (%s)\n') % inst.hint) raise except KeyboardInterrupt as inst: try: if isinstance(inst, error.SignalInterrupt): msg = _(b"killed!\n") else: 
def _runcatch(req):
    """Set up signal handling and the debugger, then run ``req``.

    Installs SIGTERM-style handlers, sanity-checks 'serve --stdio'
    invocations, honors ui.debugger/--debugger, and delegates error
    reporting to _callcatch.
    """
    with tracing.log('dispatch._runcatch'):

        def catchterm(*args):
            raise error.SignalInterrupt

        ui = req.ui
        try:
            for name in b'SIGBREAK', b'SIGHUP', b'SIGTERM':
                num = getattr(signal, name, None)
                if num:
                    signal.signal(num, catchterm)
        except ValueError:
            pass  # happens if called in a thread

        def _runcatchfunc():
            realcmd = None
            try:
                cmdargs = fancyopts.fancyopts(
                    req.args[:], commands.globalopts, {}
                )
                cmd = cmdargs[0]
                aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
                realcmd = aliases[0]
            except (
                error.UnknownCommand,
                error.AmbiguousCommand,
                IndexError,
                getopt.GetoptError,
            ):
                # Don't handle this here. We know the command is
                # invalid, but all we're worried about for now is that
                # it's not a command that server operators expect to
                # be safe to offer to users in a sandbox.
                pass
            if realcmd == b'serve' and b'--stdio' in cmdargs:
                # We want to constrain 'hg serve --stdio' instances pretty
                # closely, as many shared-ssh access tools want to grant
                # access to run *only* 'hg -R $repo serve --stdio'. We
                # restrict to exactly that set of arguments, and prohibit
                # any repo name that starts with '--' to prevent
                # shenanigans wherein a user does something like pass
                # --debugger or --config=ui.debugger=1 as a repo
                # name. This used to actually run the debugger.
                if (
                    len(req.args) != 4
                    or req.args[0] != b'-R'
                    or req.args[1].startswith(b'--')
                    or req.args[2] != b'serve'
                    or req.args[3] != b'--stdio'
                ):
                    raise error.Abort(
                        _(b'potentially unsafe serve --stdio invocation: %s')
                        % (stringutil.pprint(req.args),)
                    )

            try:
                debugger = b'pdb'
                debugtrace = {b'pdb': pdb.set_trace}
                debugmortem = {b'pdb': pdb.post_mortem}

                # read --config before doing anything else
                # (e.g. to change trust settings for reading .hg/hgrc)
                cfgs = _parseconfig(req.ui, req.earlyoptions[b'config'])

                if req.repo:
                    # copy configs that were passed on the cmdline (--config) to
                    # the repo ui
                    for sec, name, val in cfgs:
                        req.repo.ui.setconfig(
                            sec, name, val, source=b'--config'
                        )

                # developer config: ui.debugger
                debugger = ui.config(b"ui", b"debugger")
                debugmod = pdb
                if not debugger or ui.plain():
                    # if we are in HGPLAIN mode, then disable custom debugging
                    debugger = b'pdb'
                elif req.earlyoptions[b'debugger']:
                    # This import can be slow for fancy debuggers, so only
                    # do it when absolutely necessary, i.e. when actual
                    # debugging has been requested
                    with demandimport.deactivated():
                        try:
                            debugmod = __import__(debugger)
                        except ImportError:
                            pass  # Leave debugmod = pdb

                debugtrace[debugger] = debugmod.set_trace
                debugmortem[debugger] = debugmod.post_mortem

                # enter the debugger before command execution
                if req.earlyoptions[b'debugger']:
                    ui.warn(
                        _(
                            b"entering debugger - "
                            b"type c to continue starting hg or h for help\n"
                        )
                    )

                    if (
                        debugger != b'pdb'
                        and debugtrace[debugger] == debugtrace[b'pdb']
                    ):
                        ui.warn(
                            _(
                                b"%s debugger specified "
                                b"but its module was not found\n"
                            )
                            % debugger
                        )
                    with demandimport.deactivated():
                        debugtrace[debugger]()
                try:
                    return _dispatch(req)
                finally:
                    ui.flush()
            except:  # re-raises
                # enter the debugger when we hit an exception
                if req.earlyoptions[b'debugger']:
                    traceback.print_exc()
                    debugmortem[debugger](sys.exc_info()[2])
                raise

        return _callcatch(ui, _runcatchfunc)
def aliasargs(fn, givenargs):
    """Merge an alias' baked-in arguments with the user-supplied ones.

    ``$1``-style placeholders inside the alias' own arguments are replaced
    by positional entries of ``givenargs``; arguments consumed that way are
    removed from the trailing user arguments. Raises error.Abort when a
    placeholder refers to an argument the user did not supply.
    """
    predefined = []
    # only care about alias 'args', ignore 'args' set by extensions.wrapfunction
    if not util.safehasattr(fn, b'_origfunc'):
        predefined = getattr(fn, 'args', predefined)
    if not predefined:
        return predefined + givenargs

    quoted = b' '.join(map(procutil.shellquote, predefined))
    consumed = []

    def _substitute(match):
        index = int(match.group(1)) - 1
        consumed.append(index)
        if index >= len(givenargs):
            raise error.Abort(_(b'too few arguments for command alias'))
        return givenargs[index]

    expanded = re.sub(br'\$(\d+|\$)', _substitute, quoted)
    remaining = [a for i, a in enumerate(givenargs) if i not in consumed]
    return pycompat.shlexsplit(expanded) + remaining
class cmdalias(object):
    """A user-configured alias resolved against a command table.

    Parse errors and unresolvable targets are recorded in ``badalias``
    instead of raising, so help/listing code can still show the alias;
    the failure is reported when the alias is actually invoked.
    """

    def __init__(self, ui, name, definition, cmdtable, source):
        self.name = self.cmd = name
        self.cmdname = b''
        self.definition = definition
        self.fn = None
        self.givenargs = []
        self.opts = []
        self.help = b''
        self.badalias = None
        self.unknowncmd = False
        self.source = source

        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in pycompat.iteritems(cmdtable):
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        if not self.definition:
            self.badalias = _(b"no definition for alias '%s'") % self.name
            return

        if self.definition.startswith(b'!'):
            # leading '!' marks a shell alias; run the rest via ui.system()
            shdef = self.definition[1:]
            self.shell = True

            def fn(ui, *args):
                env = {b'HG_ARGS': b' '.join((self.name,) + args)}

                def _checkvar(m):
                    if m.groups()[0] == b'$':
                        return m.group()
                    elif int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        ui.debug(
                            b"No argument found for substitution "
                            b"of %i variable in alias '%s' definition.\n"
                            % (int(m.groups()[0]), self.name)
                        )
                        return b''

                cmd = re.sub(br'\$(\d+|\$)', _checkvar, shdef)
                cmd = aliasinterpolate(self.name, args, cmd)
                return ui.system(
                    cmd, environ=env, blockedtag=b'alias_%s' % self.name
                )

            self.fn = fn
            self.alias = True
            self._populatehelp(ui, name, shdef, self.fn)
            return

        try:
            args = pycompat.shlexsplit(self.definition)
        except ValueError as inst:
            self.badalias = _(b"error in definition for alias '%s': %s") % (
                self.name,
                stringutil.forcebytestr(inst),
            )
            return
        earlyopts, args = _earlysplitopts(args)
        if earlyopts:
            # early options (-R, --cwd, --config, ...) must come from the
            # real command line, not from an alias definition
            self.badalias = _(
                b"error in definition for alias '%s': %s may "
                b"only be given on the command line"
            ) % (self.name, b'/'.join(pycompat.ziplist(*earlyopts)[0]))
            return
        self.cmdname = cmd = args.pop(0)
        self.givenargs = args

        try:
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, cmdhelp = tableentry
            else:
                self.fn, self.opts = tableentry
                cmdhelp = None

            self.alias = True
            self._populatehelp(ui, name, cmd, self.fn, cmdhelp)

        except error.UnknownCommand:
            self.badalias = _(
                b"alias '%s' resolves to unknown command '%s'"
            ) % (self.name, cmd,)
            self.unknowncmd = True
        except error.AmbiguousCommand:
            self.badalias = _(
                b"alias '%s' resolves to ambiguous command '%s'"
            ) % (self.name, cmd,)

    def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
        """Fill help/doc/category from [alias] config, falling back to the
        aliased command's own help."""
        # confine strings to be passed to i18n.gettext()
        cfg = {}
        for k in (b'doc', b'help', b'category'):
            v = ui.config(b'alias', b'%s:%s' % (name, k), None)
            if v is None:
                continue
            if not encoding.isasciistr(v):
                self.badalias = _(
                    b"non-ASCII character in alias definition '%s:%s'"
                ) % (name, k)
                return
            cfg[k] = v

        self.help = cfg.get(b'help', defaulthelp or b'')
        if self.help and self.help.startswith(b"hg " + cmd):
            # drop prefix in old-style help lines so hg shows the alias
            self.help = self.help[4 + len(cmd) :]

        self.owndoc = b'doc' in cfg
        doc = cfg.get(b'doc', pycompat.getdoc(fn))
        if doc is not None:
            doc = pycompat.sysstr(doc)
        self.__doc__ = doc

        self.helpcategory = cfg.get(
            b'category', registrar.command.CATEGORY_NONE
        )

    @property
    def args(self):
        # expand user/env references in stored args before combining them
        args = pycompat.maplist(util.expandpath, self.givenargs)
        return aliasargs(self.fn, args)

    def __getattr__(self, name):
        # command-table attributes (norepo, intents, ...) proxy to the
        # target function; bad/shell aliases fall back to safe defaults
        adefaults = {
            'norepo': True,
            'intents': set(),
            'optionalrepo': False,
            'inferrepo': False,
        }
        if name not in adefaults:
            raise AttributeError(name)
        if self.badalias or util.safehasattr(self, b'shell'):
            return adefaults[name]
        return getattr(self.fn, name)

    def __call__(self, ui, *args, **opts):
        if self.badalias:
            hint = None
            if self.unknowncmd:
                try:
                    # check if the command is in a disabled extension
                    cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
                    hint = _(b"'%s' is provided by '%s' extension") % (cmd, ext)
                except error.UnknownCommand:
                    pass
            raise error.Abort(self.badalias, hint=hint)
        if self.shadows:
            ui.debug(
                b"alias '%s' shadows command '%s'\n"
                % (self.name, self.cmdname)
            )

        ui.log(
            b'commandalias',
            b"alias '%s' expands to '%s'\n",
            self.name,
            self.definition,
        )
        if util.safehasattr(self, b'shell'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                return util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                args = b' '.join([self.cmdname] + self.args)
                ui.debug(b"alias '%s' expands to '%s'\n" % (self.name, args))
                raise


class lazyaliasentry(object):
    """like a typical command entry (func, opts, help), but is lazy"""

    def __init__(self, ui, name, definition, cmdtable, source):
        self.ui = ui
        self.name = name
        self.definition = definition
        self.cmdtable = cmdtable.copy()
        self.source = source
        self.alias = True

    @util.propertycache
    def _aliasdef(self):
        # resolved on first access only, so defining many aliases stays cheap
        return cmdalias(
            self.ui, self.name, self.definition, self.cmdtable, self.source
        )

    def __getitem__(self, n):
        aliasdef = self._aliasdef
        if n == 0:
            return aliasdef
        elif n == 1:
            return aliasdef.opts
        elif n == 2:
            return aliasdef.help
        else:
            raise IndexError

    def __iter__(self):
        for i in range(3):
            yield self[i]

    def __len__(self):
        return 3
for alias, definition in ui.configitems(b'alias', ignoresub=True): try: if cmdtable[alias].definition == definition: continue except (KeyError, AttributeError): # definition might not exist or it might not be a cmdalias pass source = ui.configsource(b'alias', alias) entry = lazyaliasentry(ui, alias, definition, cmdtable, source) cmdtable[alias] = entry def _parse(ui, args): options = {} cmdoptions = {} try: args = fancyopts.fancyopts(args, commands.globalopts, options) except getopt.GetoptError as inst: raise error.CommandError(None, stringutil.forcebytestr(inst)) if args: cmd, args = args[0], args[1:] aliases, entry = cmdutil.findcmd( cmd, commands.table, ui.configbool(b"ui", b"strict") ) cmd = aliases[0] args = aliasargs(entry[0], args) defaults = ui.config(b"defaults", cmd) if defaults: args = ( pycompat.maplist(util.expandpath, pycompat.shlexsplit(defaults)) + args ) c = list(entry[1]) else: cmd = None c = [] # combine global options into local for o in commands.globalopts: c.append((o[0], o[1], options[o[1]], o[3])) try: args = fancyopts.fancyopts(args, c, cmdoptions, gnu=True) except getopt.GetoptError as inst: raise error.CommandError(cmd, stringutil.forcebytestr(inst)) # separate global options back out for o in commands.globalopts: n = o[1] options[n] = cmdoptions[n] del cmdoptions[n] return (cmd, cmd and entry[0] or None, args, options, cmdoptions) def _parseconfig(ui, config): """parse the --config options from the command line""" configs = [] for cfg in config: try: name, value = [cfgelem.strip() for cfgelem in cfg.split(b'=', 1)] section, name = name.split(b'.', 1) if not section or not name: raise IndexError ui.setconfig(section, name, value, b'--config') configs.append((section, name, value)) except (IndexError, ValueError): raise error.Abort( _( b'malformed --config option: %r ' b'(use --config section.name=value)' ) % pycompat.bytestr(cfg) ) return configs def _earlyparseopts(ui, args): options = {} fancyopts.fancyopts( args, commands.globalopts, 
def _earlysplitopts(args):
    """Split args into a list of possible early options and remainder args"""
    shortoptions = b'R:'
    # TODO: perhaps 'debugger' should be included
    longoptions = [b'cwd=', b'repository=', b'repo=', b'config=']
    return fancyopts.earlygetopt(
        args, shortoptions, longoptions, gnu=True, keepsep=True
    )


def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
    """Run ``d`` (the prepared command thunk) surrounded by its hooks.

    Order matters: pre-<cmd> runs first (and may abort), post-<cmd> runs on
    success with the result, fail-<cmd> runs on any exception before
    re-raising.
    """
    # run pre-hook, and abort if it fails
    hook.hook(
        lui,
        repo,
        b"pre-%s" % cmd,
        True,
        args=b" ".join(fullargs),
        pats=cmdpats,
        opts=cmdoptions,
    )
    try:
        ret = _runcommand(ui, options, cmd, d)
        # run post-hook, passing command result
        hook.hook(
            lui,
            repo,
            b"post-%s" % cmd,
            False,
            args=b" ".join(fullargs),
            result=ret,
            pats=cmdpats,
            opts=cmdoptions,
        )
    except Exception:
        # run failure hook and re-raise
        hook.hook(
            lui,
            repo,
            b"fail-%s" % cmd,
            False,
            args=b" ".join(fullargs),
            pats=cmdpats,
            opts=cmdoptions,
        )
        raise
    return ret


def _readsharedsourceconfig(ui, path):
    """if the current repository is shared one, this tries to read
    .hg/hgrc of shared source if we are in share-safe mode

    Config read is loaded into the ui object passed

    This should be called before reading .hg/hgrc or the main repo
    as that overrides config set in shared source"""
    try:
        with open(os.path.join(path, b".hg", b"requires"), "rb") as fp:
            requirements = set(fp.read().splitlines())
            # only applies when both share-safe and shared requirements are set
            if not (
                requirementsmod.SHARESAFE_REQUIREMENT in requirements
                and requirementsmod.SHARED_REQUIREMENT in requirements
            ):
                return
            hgvfs = vfs.vfs(os.path.join(path, b".hg"))
            sharedvfs = localrepo._getsharedvfs(hgvfs, requirements)
            ui.readconfig(sharedvfs.join(b"hgrc"), path)
    except IOError:
        # best-effort: a missing/unreadable requires file means nothing to do
        pass
def _checkshellalias(lui, ui, args):
    """Return the function to run the shell alias, if it is required"""
    options = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except getopt.GetoptError:
        return

    if not args:
        return

    cmdtable = commands.table

    cmd = args[0]
    try:
        strict = ui.configbool(b"ui", b"strict")
        aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
    except (error.AmbiguousCommand, error.UnknownCommand):
        return

    cmd = aliases[0]
    fn = entry[0]

    if cmd and util.safehasattr(fn, b'shell'):
        # shell alias shouldn't receive early options which are consumed by hg
        _earlyopts, args = _earlysplitopts(args)
        d = lambda: fn(ui, *args[1:])
        return lambda: runcommand(
            lui, None, cmd, args[:1], ui, options, d, [], {}
        )


def _dispatch(req):
    """Resolve and execute the command described by ``req``.

    Loads extensions and aliases, validates early options against the fully
    parsed ones, configures ui/color/pager, opens the repository when the
    command needs one, then runs the command through runcommand().
    """
    args = req.args
    ui = req.ui

    # check for cwd
    cwd = req.earlyoptions[b'cwd']
    if cwd:
        os.chdir(cwd)

    rpath = req.earlyoptions[b'repository']
    path, lui = _getlocal(ui, rpath)

    uis = {ui, lui}

    if req.repo:
        uis.add(req.repo.ui)

    if (
        req.earlyoptions[b'verbose']
        or req.earlyoptions[b'debug']
        or req.earlyoptions[b'quiet']
    ):
        for opt in (b'verbose', b'debug', b'quiet'):
            val = pycompat.bytestr(bool(req.earlyoptions[opt]))
            for ui_ in uis:
                ui_.setconfig(b'ui', opt, val, b'--' + opt)

    if req.earlyoptions[b'profile']:
        for ui_ in uis:
            ui_.setconfig(b'profiling', b'enabled', b'true', b'--profile')

    profile = lui.configbool(b'profiling', b'enabled')
    with profiling.profile(lui, enabled=profile) as profiler:
        # Configure extensions in phases: uisetup, extsetup, cmdtable, and
        # reposetup
        extensions.loadall(lui)
        # Propagate any changes to lui.__class__ by extensions
        ui.__class__ = lui.__class__

        # (uisetup and extsetup are handled in extensions.loadall)

        # (reposetup is handled in hg.repository)

        addaliases(lui, commands.table)

        # All aliases and commands are completely defined, now.
        # Check abbreviation/ambiguity of shell alias.
        shellaliasfn = _checkshellalias(lui, ui, args)
        if shellaliasfn:
            # no additional configs will be set, set up the ui instances
            for ui_ in uis:
                extensions.populateui(ui_)
            return shellaliasfn()

        # check for fallback encoding
        fallback = lui.config(b'ui', b'fallbackencoding')
        if fallback:
            encoding.fallbackencoding = fallback

        fullargs = args
        cmd, func, args, options, cmdoptions = _parse(lui, args)

        # store the canonical command name in request object for later access
        req.canonical_command = cmd

        # early options were already acted upon; reject command lines where
        # the full parse disagrees with the early parse
        if options[b"config"] != req.earlyoptions[b"config"]:
            raise error.Abort(_(b"option --config may not be abbreviated!"))
        if options[b"cwd"] != req.earlyoptions[b"cwd"]:
            raise error.Abort(_(b"option --cwd may not be abbreviated!"))
        if options[b"repository"] != req.earlyoptions[b"repository"]:
            raise error.Abort(
                _(
                    b"option -R has to be separated from other options (e.g. not "
                    b"-qR) and --repository may only be abbreviated as --repo!"
                )
            )
        if options[b"debugger"] != req.earlyoptions[b"debugger"]:
            raise error.Abort(_(b"option --debugger may not be abbreviated!"))
        # don't validate --profile/--traceback, which can be enabled from now

        if options[b"encoding"]:
            encoding.encoding = options[b"encoding"]
        if options[b"encodingmode"]:
            encoding.encodingmode = options[b"encodingmode"]
        if options[b"time"]:

            def get_times():
                t = os.times()
                if t[4] == 0.0:
                    # Windows leaves this as zero, so use time.perf_counter()
                    t = (t[0], t[1], t[2], t[3], util.timer())
                return t

            s = get_times()

            def print_time():
                t = get_times()
                ui.warn(
                    _(b"time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n")
                    % (
                        t[4] - s[4],
                        t[0] - s[0],
                        t[2] - s[2],
                        t[1] - s[1],
                        t[3] - s[3],
                    )
                )

            ui.atexit(print_time)
        if options[b"profile"]:
            profiler.start()

        # if abbreviated version of this were used, take them in account, now
        if options[b'verbose'] or options[b'debug'] or options[b'quiet']:
            for opt in (b'verbose', b'debug', b'quiet'):
                if options[opt] == req.earlyoptions[opt]:
                    continue
                val = pycompat.bytestr(bool(options[opt]))
                for ui_ in uis:
                    ui_.setconfig(b'ui', opt, val, b'--' + opt)

        if options[b'traceback']:
            for ui_ in uis:
                ui_.setconfig(b'ui', b'traceback', b'on', b'--traceback')

        if options[b'noninteractive']:
            for ui_ in uis:
                ui_.setconfig(b'ui', b'interactive', b'off', b'-y')

        if cmdoptions.get(b'insecure', False):
            for ui_ in uis:
                ui_.insecureconnections = True

        # setup color handling before pager, because setting up pager
        # might cause incorrect console information
        coloropt = options[b'color']
        for ui_ in uis:
            if coloropt:
                ui_.setconfig(b'ui', b'color', coloropt, b'--color')
            color.setup(ui_)

        if stringutil.parsebool(options[b'pager']):
            # ui.pager() expects 'internal-always-' prefix in this case
            ui.pager(b'internal-always-' + cmd)
        elif options[b'pager'] != b'auto':
            for ui_ in uis:
                ui_.disablepager()

        # configs are fully loaded, set up the ui instances
        for ui_ in uis:
            extensions.populateui(ui_)

        if options[b'version']:
            return commands.version_(ui)
        if options[b'help']:
            return commands.help_(ui, cmd, command=cmd is not None)
        elif not cmd:
            return commands.help_(ui, b'shortlist')

        repo = None
        cmdpats = args[:]
        assert func is not None  # help out pytype
        if not func.norepo:
            # use the repo from the request only if we don't have -R
            if not rpath and not cwd:
                repo = req.repo

            if repo:
                # set the descriptors of the repo ui to those of ui
                repo.ui.fin = ui.fin
                repo.ui.fout = ui.fout
                repo.ui.ferr = ui.ferr
                repo.ui.fmsg = ui.fmsg
            else:
                try:
                    repo = hg.repository(
                        ui,
                        path=path,
                        presetupfuncs=req.prereposetups,
                        intents=func.intents,
                    )
                    if not repo.local():
                        raise error.Abort(
                            _(b"repository '%s' is not local") % path
                        )
                    repo.ui.setconfig(
                        b"bundle", b"mainreporoot", repo.root, b'repo'
                    )
                except error.RequirementError:
                    raise
                except error.RepoError:
                    if rpath:  # invalid -R path
                        raise
                    if not func.optionalrepo:
                        if func.inferrepo and args and not path:
                            # try to infer -R from command args
                            repos = pycompat.maplist(cmdutil.findrepo, args)
                            guess = repos[0]
                            if guess and repos.count(guess) == len(repos):
                                req.args = [b'--repository', guess] + fullargs
                                req.earlyoptions[b'repository'] = guess
                                return _dispatch(req)
                        if not path:
                            raise error.RepoError(
                                _(
                                    b"no repository found in"
                                    b" '%s' (.hg not found)"
                                )
                                % encoding.getcwd()
                            )
                        raise
            if repo:
                ui = repo.ui
                if options[b'hidden']:
                    repo = repo.unfiltered()
            args.insert(0, repo)
        elif rpath:
            ui.warn(_(b"warning: --repository ignored\n"))

        msg = _formatargs(fullargs)
        ui.log(b"command", b'%s\n', msg)
        strcmdopt = pycompat.strkwargs(cmdoptions)
        d = lambda: util.checksignature(func)(ui, *args, **strcmdopt)
        try:
            return runcommand(
                lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions
            )
        finally:
            # close repositories we opened ourselves, but not one handed in
            # through the request
            if repo and repo != req.repo:
                repo.close()


def _runcommand(ui, options, cmd, cmdfunc):
    """Run a command function, possibly with profiling enabled."""
    try:
        with tracing.log("Running %s command" % cmd):
            return cmdfunc()
    except error.SignatureError:
        raise error.CommandError(cmd, _(b'invalid arguments'))
def _exceptionwarning(ui):
    """Produce a warning message for the current active exception"""

    # For compatibility checking, we discard the portion of the hg
    # version after the + on the assumption that if a "normal
    # user" is running a build with a + in it the packager
    # probably built from fairly close to a tag and anyone with a
    # 'make local' copy of hg (where the version number can be out
    # of date) will be clueful enough to notice the implausible
    # version number and try updating.
    ct = util.versiontuple(n=2)
    worst = None, ct, b''
    if ui.config(b'ui', b'supportcontact') is None:
        for name, mod in extensions.extensions():
            # 'testedwith' should be bytes, but not all extensions are ported
            # to py3 and we don't want UnicodeException because of that.
            testedwith = stringutil.forcebytestr(
                getattr(mod, 'testedwith', b'')
            )
            report = getattr(mod, 'buglink', _(b'the extension author.'))
            if not testedwith.strip():
                # We found an untested extension. It's likely the culprit.
                worst = name, b'unknown', report
                break

            # Never blame on extensions bundled with Mercurial.
            if extensions.ismoduleinternal(mod):
                continue

            tested = [util.versiontuple(t, 2) for t in testedwith.split()]
            if ct in tested:
                continue

            # pick the extension whose newest supported version is furthest
            # behind the running Mercurial as the most likely culprit
            lower = [t for t in tested if t < ct]
            nearest = max(lower or tested)
            if worst[0] is None or nearest < worst[1]:
                worst = name, nearest, report
    if worst[0] is not None:
        name, testedwith, report = worst
        if not isinstance(testedwith, (bytes, str)):
            testedwith = b'.'.join(
                [stringutil.forcebytestr(c) for c in testedwith]
            )
        warning = _(
            b'** Unknown exception encountered with '
            b'possibly-broken third-party extension %s\n'
            b'** which supports versions %s of Mercurial.\n'
            b'** Please disable %s and try your action again.\n'
            b'** If that fixes the bug please report it to %s\n'
        ) % (name, testedwith, name, stringutil.forcebytestr(report))
    else:
        bugtracker = ui.config(b'ui', b'supportcontact')
        if bugtracker is None:
            bugtracker = _(b"https://mercurial-scm.org/wiki/BugTracker")
        warning = (
            _(
                b"** unknown exception encountered, "
                b"please report by visiting\n** "
            )
            + bugtracker
            + b'\n'
        )
    sysversion = pycompat.sysbytes(sys.version).replace(b'\n', b'')
    warning += (
        (_(b"** Python %s\n") % sysversion)
        + (_(b"** Mercurial Distributed SCM (version %s)\n") % util.version())
        + (
            _(b"** Extensions loaded: %s\n")
            % b", ".join([x[0] for x in extensions.extensions()])
        )
    )
    return warning


def handlecommandexception(ui):
    """Produce a warning message for broken commands

    Called when handling an exception; the exception is reraised if
    this function returns False, ignored otherwise.
    """
    warning = _exceptionwarning(ui)
    ui.log(
        b"commandexception",
        b"%s\n%s\n",
        warning,
        pycompat.sysbytes(traceback.format_exc()),
    )
    ui.warn(warning)
    return False  # re-raise the exception
# vfs-location is:
# - 'plain' for vfs relative paths
# - '' for svfs relative paths
_cachedfiles = set()


class _basefilecache(scmutil.filecache):
    """All filecache usage on a repo is done for logic that should be
    unfiltered, so this base class proxies cache access to the unfiltered
    repo."""

    def __get__(self, repo, type=None):
        if repo is None:
            return self
        # proxy to unfiltered __dict__ since filtered repo has no entry
        unfi = repo.unfiltered()
        try:
            return unfi.__dict__[self.sname]
        except KeyError:
            pass
        return super(_basefilecache, self).__get__(unfi, type)

    def set(self, repo, value):
        # always store the value on the unfiltered repo so every view sees it
        return super(_basefilecache, self).set(repo.unfiltered(), value)


class repofilecache(_basefilecache):
    """filecache for files in .hg but outside of .hg/store"""

    def __init__(self, *paths):
        super(repofilecache, self).__init__(*paths)
        for path in paths:
            # register so invalidation logic knows this is a 'plain' vfs path
            _cachedfiles.add((path, b'plain'))

    def join(self, obj, fname):
        return obj.vfs.join(fname)


class storecache(_basefilecache):
    """filecache for files in the store (.hg/store)"""

    def __init__(self, *paths):
        super(storecache, self).__init__(*paths)
        for path in paths:
            # '' location marks an svfs-relative path
            _cachedfiles.add((path, b''))

    def join(self, obj, fname):
        return obj.sjoin(fname)


class mixedrepostorecache(_basefilecache):
    """filecache for a mix of files in .hg/store and outside"""

    def __init__(self, *pathsandlocations):
        # scmutil.filecache only uses the path for passing back into our
        # join(), so we can safely pass a list of paths and locations
        super(mixedrepostorecache, self).__init__(*pathsandlocations)
        _cachedfiles.update(pathsandlocations)

    def join(self, obj, fnameandlocation):
        fname, location = fnameandlocation
        if location == b'plain':
            return obj.vfs.join(fname)
        else:
            if location != b'':
                raise error.ProgrammingError(
                    b'unexpected location: %s' % location
                )
            return obj.sjoin(fname)


def isfilecached(repo, name):
    """check if a repo has already cached "name" filecache-ed property

    This returns (cachedobj-or-None, iscached) tuple.
""" cacheentry = repo.unfiltered()._filecache.get(name, None) if not cacheentry: return None, False return cacheentry.obj, True class unfilteredpropertycache(util.propertycache): """propertycache that apply to unfiltered repo only""" def __get__(self, repo, type=None): unfi = repo.unfiltered() if unfi is repo: return super(unfilteredpropertycache, self).__get__(unfi) return getattr(unfi, self.name) class filteredpropertycache(util.propertycache): """propertycache that must take filtering in account""" def cachevalue(self, obj, value): object.__setattr__(obj, self.name, value) def hasunfilteredcache(repo, name): """check if a repo has an unfilteredpropertycache value for """ return name in vars(repo.unfiltered()) def unfilteredmethod(orig): """decorate method that always need to be run on unfiltered version""" @functools.wraps(orig) def wrapper(repo, *args, **kwargs): return orig(repo.unfiltered(), *args, **kwargs) return wrapper moderncaps = { b'lookup', b'branchmap', b'pushkey', b'known', b'getbundle', b'unbundle', } legacycaps = moderncaps.union({b'changegroupsubset'}) @interfaceutil.implementer(repository.ipeercommandexecutor) class localcommandexecutor(object): def __init__(self, peer): self._peer = peer self._sent = False self._closed = False def __enter__(self): return self def __exit__(self, exctype, excvalue, exctb): self.close() def callcommand(self, command, args): if self._sent: raise error.ProgrammingError( b'callcommand() cannot be used after sendcommands()' ) if self._closed: raise error.ProgrammingError( b'callcommand() cannot be used after close()' ) # We don't need to support anything fancy. Just call the named # method on the peer and return a resolved future. 
        fn = getattr(self._peer, pycompat.sysstr(command))

        f = pycompat.futures.Future()

        try:
            result = fn(**pycompat.strkwargs(args))
        except Exception:
            # surface the failure through the future, like a remote executor
            pycompat.future_set_exception_info(f, sys.exc_info()[1:])
        else:
            f.set_result(result)

        return f

    def sendcommands(self):
        # commands were already executed synchronously in callcommand()
        self._sent = True

    def close(self):
        self._closed = True


@interfaceutil.implementer(repository.ipeercommands)
class localpeer(repository.peer):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=None):
        super(localpeer, self).__init__()

        if caps is None:
            caps = moderncaps.copy()
        # the peer always exposes the 'served' filtered view of the repo
        self._repo = repo.filtered(b'served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)

    # Begin of _basepeer interface.

    def url(self):
        return self._repo.url()

    def local(self):
        return self._repo

    def peer(self):
        return self

    def canpush(self):
        return True

    def close(self):
        self._repo.close()

    # End of _basepeer interface.

    # Begin of _basewirecommands interface.

    def branchmap(self):
        return self._repo.branchmap()

    def capabilities(self):
        return self._caps

    def clonebundles(self):
        return self._repo.tryread(b'clonebundles.manifest')

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        """Used to test argument passing over the wire"""
        return b"%s %s %s %s %s" % (
            one,
            two,
            pycompat.bytestr(three),
            pycompat.bytestr(four),
            pycompat.bytestr(five),
        )

    def getbundle(
        self, source, heads=None, common=None, bundlecaps=None, **kwargs
    ):
        chunks = exchange.getbundlechunks(
            self._repo,
            source,
            heads=heads,
            common=common,
            bundlecaps=bundlecaps,
            **kwargs
        )[1]
        cb = util.chunkbuffer(chunks)

        if exchange.bundle2requested(bundlecaps):
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            return bundle2.getunbundler(self.ui, cb)
        else:
            return changegroup.getunbundler(b'01', cb, None)

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def lookup(self, key):
        return self._repo.lookup(key)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def stream_out(self):
        raise error.Abort(_(b'cannot perform stream clone against local peer'))

    def unbundle(self, bundle, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            try:
                bundle = exchange.readbundle(self.ui, bundle, None)
                ret = exchange.unbundle(self._repo, bundle, heads, b'push', url)
                if util.safehasattr(ret, b'getchunks'):
                    # This is a bundle20 object, turn it into an unbundler.
                    # This little dance should be dropped eventually when the
                    # API is finally improved.
                    stream = util.chunkbuffer(ret.getchunks())
                    ret = bundle2.getunbundler(self.ui, stream)
                return ret
            except Exception as exc:
                # If the exception contains output salvaged from a bundle2
                # reply, we need to make sure it is printed before continuing
                # to fail. So we build a bundle2 with such output and consume
                # it directly.
                #
                # This is not very elegant but allows a "simple" solution for
                # issue4594
                output = getattr(exc, '_bundle2salvagedoutput', ())
                if output:
                    bundler = bundle2.bundle20(self._repo.ui)
                    for out in output:
                        bundler.addpart(out)
                    stream = util.chunkbuffer(bundler.getchunks())
                    b = bundle2.getunbundler(self.ui, stream)
                    bundle2.processbundle(self._repo, b)
                raise
        except error.PushRaced as exc:
            # translate the race into a wire-level response error
            raise error.ResponseError(
                _(b'push failed:'), stringutil.forcebytestr(exc)
            )

    # End of _basewirecommands interface.

    # Begin of peer interface.

    def commandexecutor(self):
        return localcommandexecutor(self)

    # End of peer interface.


@interfaceutil.implementer(repository.ipeerlegacycommands)
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        super(locallegacypeer, self).__init__(repo, caps=legacycaps)

    # Begin of baselegacywirecommands interface.

    def between(self, pairs):
        return self._repo.between(pairs)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def changegroup(self, nodes, source):
        outgoing = discovery.outgoing(
            self._repo, missingroots=nodes, ancestorsof=self._repo.heads()
        )
        return changegroup.makechangegroup(self._repo, outgoing, b'01', source)

    def changegroupsubset(self, bases, heads, source):
        outgoing = discovery.outgoing(
            self._repo, missingroots=bases, ancestorsof=heads
        )
        return changegroup.makechangegroup(self._repo, outgoing, b'01', source)

    # End of baselegacywirecommands interface.


# Functions receiving (ui, features) that extensions can register to impact
# the ability to load repositories with custom requirements. Only
# functions defined in loaded extensions are called.
#
# The function receives a set of requirement strings that the repository
# is capable of opening. Functions will typically add elements to the
# set to reflect that the extension knows how to handle that requirements.
featuresetupfuncs = set()


def _getsharedvfs(hgvfs, requirements):
    """returns the vfs object pointing to root of shared source
    repo for a shared repository

    hgvfs is vfs pointing at .hg/ of current repo (shared one)
    requirements is a set of requirements of current repo (shared one)
    """
    # The ``shared`` or ``relshared`` requirements indicate the
    # store lives in the path contained in the ``.hg/sharedpath`` file.
    # This is an absolute path for ``shared`` and relative to
    # ``.hg/`` for ``relshared``.
    sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n')
    if requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements:
        # relshared: stored path is relative to this repo's .hg/
        sharedpath = hgvfs.join(sharedpath)

    sharedvfs = vfsmod.vfs(sharedpath, realpath=True)

    if not sharedvfs.exists():
        raise error.RepoError(
            _(b'.hg/sharedpath points to nonexistent directory %s')
            % sharedvfs.base
        )
    return sharedvfs


def _readrequires(vfs, allowmissing):
    """reads the require file present at root of this vfs
    and return a set of requirements

    If allowmissing is True, we suppress ENOENT if raised"""
    # requires file contains a newline-delimited list of
    # features/capabilities the opener (us) must have in order to use
    # the repository. This file was introduced in Mercurial 0.9.2,
    # which means very old repositories may not have one. We assume
    # a missing file translates to no requirements.
    try:
        requirements = set(vfs.read(b'requires').splitlines())
    except IOError as e:
        if not (allowmissing and e.errno == errno.ENOENT):
            raise
        requirements = set()
    return requirements


def makelocalrepository(baseui, path, intents=None):
    """Create a local repository object.

    Given arguments needed to construct a local repository, this function
    performs various early repository loading functionality (such as
    reading the ``.hg/requires`` and ``.hg/hgrc`` files), validates that
    the repository can be opened, derives a type suitable for representing
    that repository, and returns an instance of it.

    The returned object conforms to the ``repository.completelocalrepository``
    interface.

    The repository type is derived by calling a series of factory functions
    for each aspect/interface of the final repository. These are defined by
    ``REPO_INTERFACES``.

    Each factory function is called to produce a type implementing a
    specific interface. The cumulative list of returned types will be
    combined into a new type and that type will be instantiated to
    represent the local repository.

    The factory functions each receive various state that may be consulted
    as part of deriving a type.
Extensions should wrap these factory functions to customize repository type creation. Note that an extension's wrapped function may be called even if that extension is not loaded for the repo being constructed. Extensions should check if their ``__name__`` appears in the ``extensionmodulenames`` set passed to the factory function and no-op if not. """ ui = baseui.copy() # Prevent copying repo configuration. ui.copy = baseui.copy # Working directory VFS rooted at repository root. wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True) # Main VFS for .hg/ directory. hgpath = wdirvfs.join(b'.hg') hgvfs = vfsmod.vfs(hgpath, cacheaudited=True) # Whether this repository is shared one or not shared = False # If this repository is shared, vfs pointing to shared repo sharedvfs = None # The .hg/ path should exist and should be a directory. All other # cases are errors. if not hgvfs.isdir(): try: hgvfs.stat() except OSError as e: if e.errno != errno.ENOENT: raise except ValueError as e: # Can be raised on Python 3.8 when path is invalid. raise error.Abort( _(b'invalid path %s: %s') % (path, pycompat.bytestr(e)) ) raise error.RepoError(_(b'repository %s not found') % path) requirements = _readrequires(hgvfs, True) shared = ( requirementsmod.SHARED_REQUIREMENT in requirements or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements ) if shared: sharedvfs = _getsharedvfs(hgvfs, requirements) # if .hg/requires contains the sharesafe requirement, it means # there exists a `.hg/store/requires` too and we should read it # NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement # is present. 
We never write SHARESAFE_REQUIREMENT for a repo if store # is not present, refer checkrequirementscompat() for that if requirementsmod.SHARESAFE_REQUIREMENT in requirements: if shared: # This is a shared repo storevfs = vfsmod.vfs(sharedvfs.join(b'store')) else: storevfs = vfsmod.vfs(hgvfs.join(b'store')) requirements |= _readrequires(storevfs, False) # The .hg/hgrc file may load extensions or contain config options # that influence repository construction. Attempt to load it and # process any new extensions that it may have pulled in. if loadhgrc(ui, wdirvfs, hgvfs, requirements, sharedvfs): afterhgrcload(ui, wdirvfs, hgvfs, requirements) extensions.loadall(ui) extensions.populateui(ui) # Set of module names of extensions loaded for this repository. extensionmodulenames = {m.__name__ for n, m in extensions.extensions(ui)} supportedrequirements = gathersupportedrequirements(ui) # We first validate the requirements are known. ensurerequirementsrecognized(requirements, supportedrequirements) # Then we validate that the known set is reasonable to use together. ensurerequirementscompatible(ui, requirements) # TODO there are unhandled edge cases related to opening repositories with # shared storage. If storage is shared, we should also test for requirements # compatibility in the pointed-to repo. This entails loading the .hg/hgrc in # that repo, as that repo may load extensions needed to open it. This is a # bit complicated because we don't want the other hgrc to overwrite settings # in this hgrc. # # This bug is somewhat mitigated by the fact that we copy the .hg/requires # file when sharing repos. But if a requirement is added after the share is # performed, thereby introducing a new requirement for the opener, we may # will not see that and could encounter a run-time error interacting with # that shared store since it has an unknown-to-us requirement. # At this point, we know we should be capable of opening the repository. # Now get on with doing that. 
features = set() # The "store" part of the repository holds versioned data. How it is # accessed is determined by various requirements. If `shared` or # `relshared` requirements are present, this indicates current repository # is a share and store exists in path mentioned in `.hg/sharedpath` if shared: storebasepath = sharedvfs.base cachepath = sharedvfs.join(b'cache') features.add(repository.REPO_FEATURE_SHARED_STORAGE) else: storebasepath = hgvfs.base cachepath = hgvfs.join(b'cache') wcachepath = hgvfs.join(b'wcache') # The store has changed over time and the exact layout is dictated by # requirements. The store interface abstracts differences across all # of them. store = makestore( requirements, storebasepath, lambda base: vfsmod.vfs(base, cacheaudited=True), ) hgvfs.createmode = store.createmode storevfs = store.vfs storevfs.options = resolvestorevfsoptions(ui, requirements, features) # The cache vfs is used to manage cache files. cachevfs = vfsmod.vfs(cachepath, cacheaudited=True) cachevfs.createmode = store.createmode # The cache vfs is used to manage cache files related to the working copy wcachevfs = vfsmod.vfs(wcachepath, cacheaudited=True) wcachevfs.createmode = store.createmode # Now resolve the type for the repository object. We do this by repeatedly # calling a factory function to produces types for specific aspects of the # repo's operation. The aggregate returned types are used as base classes # for a dynamically-derived type, which will represent our new repository. bases = [] extrastate = {} for iface, fn in REPO_INTERFACES: # We pass all potentially useful state to give extensions tons of # flexibility. 
typ = fn()( ui=ui, intents=intents, requirements=requirements, features=features, wdirvfs=wdirvfs, hgvfs=hgvfs, store=store, storevfs=storevfs, storeoptions=storevfs.options, cachevfs=cachevfs, wcachevfs=wcachevfs, extensionmodulenames=extensionmodulenames, extrastate=extrastate, baseclasses=bases, ) if not isinstance(typ, type): raise error.ProgrammingError( b'unable to construct type for %s' % iface ) bases.append(typ) # type() allows you to use characters in type names that wouldn't be # recognized as Python symbols in source code. We abuse that to add # rich information about our constructed repo. name = pycompat.sysstr( b'derivedrepo:%s<%s>' % (wdirvfs.base, b','.join(sorted(requirements))) ) cls = type(name, tuple(bases), {}) return cls( baseui=baseui, ui=ui, origroot=path, wdirvfs=wdirvfs, hgvfs=hgvfs, requirements=requirements, supportedrequirements=supportedrequirements, sharedpath=storebasepath, store=store, cachevfs=cachevfs, wcachevfs=wcachevfs, features=features, intents=intents, ) def loadhgrc(ui, wdirvfs, hgvfs, requirements, sharedvfs=None): """Load hgrc files/content into a ui instance. This is called during repository opening to load any additional config files or settings relevant to the current repository. Returns a bool indicating whether any additional configs were loaded. Extensions should monkeypatch this function to modify how per-repo configs are loaded. For example, an extension may wish to pull in configs from alternate files or sources. 
sharedvfs is vfs object pointing to source repo if the current one is a shared one """ if not rcutil.use_repo_hgrc(): return False + ret = False # first load config from shared source if we has to if requirementsmod.SHARESAFE_REQUIREMENT in requirements and sharedvfs: try: ui.readconfig(sharedvfs.join(b'hgrc'), root=sharedvfs.base) + ret = True except IOError: pass try: ui.readconfig(hgvfs.join(b'hgrc'), root=wdirvfs.base) - return True + ret = True except IOError: - return False + pass + + try: + ui.readconfig(hgvfs.join(b'hgrc-not-shared'), root=wdirvfs.base) + ret = True + except IOError: + pass + + return ret def afterhgrcload(ui, wdirvfs, hgvfs, requirements): """Perform additional actions after .hg/hgrc is loaded. This function is called during repository loading immediately after the .hg/hgrc file is loaded and before per-repo extensions are loaded. The function can be used to validate configs, automatically add options (including extensions) based on requirements, etc. """ # Map of requirements to list of extensions to load automatically when # requirement is present. autoextensions = { b'git': [b'git'], b'largefiles': [b'largefiles'], b'lfs': [b'lfs'], } for requirement, names in sorted(autoextensions.items()): if requirement not in requirements: continue for name in names: if not ui.hasconfig(b'extensions', name): ui.setconfig(b'extensions', name, b'', source=b'autoload') def gathersupportedrequirements(ui): """Determine the complete set of recognized requirements.""" # Start with all requirements supported by this file. supported = set(localrepository._basesupported) # Execute ``featuresetupfuncs`` entries if they belong to an extension # relevant to this ui instance. modules = {m.__name__ for n, m in extensions.extensions(ui)} for fn in featuresetupfuncs: if fn.__module__ in modules: fn(ui, supported) # Add derived requirements from registered compression engines. 
    for name in util.compengines:
        engine = util.compengines[name]
        if engine.available() and engine.revlogheader():
            supported.add(b'exp-compression-%s' % name)
            if engine.name() == b'zstd':
                supported.add(b'revlog-compression-zstd')

    return supported


def ensurerequirementsrecognized(requirements, supported):
    """Validate that a set of local requirements is recognized.

    Receives a set of requirements. Raises an ``error.RequirementError``
    if there exists any requirement in that set that currently loaded
    code doesn't recognize.

    Returns ``None``; the absence of an exception means all requirements
    are recognized.
    """
    missing = set()

    for requirement in requirements:
        if requirement in supported:
            continue

        # an empty or non-alphanumeric-leading entry means the requires
        # file itself is damaged, not merely from a newer Mercurial
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(b'.hg/requires file is corrupt'))

        missing.add(requirement)

    if missing:
        raise error.RequirementError(
            _(b'repository requires features unknown to this Mercurial: %s')
            % b' '.join(sorted(missing)),
            hint=_(
                b'see https://mercurial-scm.org/wiki/MissingRequirement '
                b'for more information'
            ),
        )


def ensurerequirementscompatible(ui, requirements):
    """Validates that a set of recognized requirements is mutually compatible.

    Some requirements may not be compatible with others or require
    config options that aren't enabled. This function is called during
    repository opening to ensure that the set of requirements needed
    to open a repository is sane and compatible with config options.

    Extensions can monkeypatch this function to perform additional
    checking.

    ``error.RepoError`` should be raised on failure.
""" if ( requirementsmod.SPARSE_REQUIREMENT in requirements and not sparse.enabled ): raise error.RepoError( _( b'repository is using sparse feature but ' b'sparse is not enabled; enable the ' b'"sparse" extensions to access' ) ) def makestore(requirements, path, vfstype): """Construct a storage object for a repository.""" if b'store' in requirements: if b'fncache' in requirements: return storemod.fncachestore( path, vfstype, b'dotencode' in requirements ) return storemod.encodedstore(path, vfstype) return storemod.basicstore(path, vfstype) def resolvestorevfsoptions(ui, requirements, features): """Resolve the options to pass to the store vfs opener. The returned dict is used to influence behavior of the storage layer. """ options = {} if requirementsmod.TREEMANIFEST_REQUIREMENT in requirements: options[b'treemanifest'] = True # experimental config: format.manifestcachesize manifestcachesize = ui.configint(b'format', b'manifestcachesize') if manifestcachesize is not None: options[b'manifestcachesize'] = manifestcachesize # In the absence of another requirement superseding a revlog-related # requirement, we have to assume the repo is using revlog version 0. # This revlog format is super old and we don't bother trying to parse # opener options for it because those options wouldn't do anything # meaningful on such old repos. 
if ( b'revlogv1' in requirements or requirementsmod.REVLOGV2_REQUIREMENT in requirements ): options.update(resolverevlogstorevfsoptions(ui, requirements, features)) else: # explicitly mark repo as using revlogv0 options[b'revlogv0'] = True if requirementsmod.COPIESSDC_REQUIREMENT in requirements: options[b'copies-storage'] = b'changeset-sidedata' else: writecopiesto = ui.config(b'experimental', b'copies.write-to') copiesextramode = (b'changeset-only', b'compatibility') if writecopiesto in copiesextramode: options[b'copies-storage'] = b'extra' return options def resolverevlogstorevfsoptions(ui, requirements, features): """Resolve opener options specific to revlogs.""" options = {} options[b'flagprocessors'] = {} if b'revlogv1' in requirements: options[b'revlogv1'] = True if requirementsmod.REVLOGV2_REQUIREMENT in requirements: options[b'revlogv2'] = True if b'generaldelta' in requirements: options[b'generaldelta'] = True # experimental config: format.chunkcachesize chunkcachesize = ui.configint(b'format', b'chunkcachesize') if chunkcachesize is not None: options[b'chunkcachesize'] = chunkcachesize deltabothparents = ui.configbool( b'storage', b'revlog.optimize-delta-parent-choice' ) options[b'deltabothparents'] = deltabothparents lazydelta = ui.configbool(b'storage', b'revlog.reuse-external-delta') lazydeltabase = False if lazydelta: lazydeltabase = ui.configbool( b'storage', b'revlog.reuse-external-delta-parent' ) if lazydeltabase is None: lazydeltabase = not scmutil.gddeltaconfig(ui) options[b'lazydelta'] = lazydelta options[b'lazydeltabase'] = lazydeltabase chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan') if 0 <= chainspan: options[b'maxdeltachainspan'] = chainspan mmapindexthreshold = ui.configbytes(b'experimental', b'mmapindexthreshold') if mmapindexthreshold is not None: options[b'mmapindexthreshold'] = mmapindexthreshold withsparseread = ui.configbool(b'experimental', b'sparse-read') srdensitythres = float( ui.config(b'experimental', 
b'sparse-read.density-threshold') ) srmingapsize = ui.configbytes(b'experimental', b'sparse-read.min-gap-size') options[b'with-sparse-read'] = withsparseread options[b'sparse-read-density-threshold'] = srdensitythres options[b'sparse-read-min-gap-size'] = srmingapsize sparserevlog = requirementsmod.SPARSEREVLOG_REQUIREMENT in requirements options[b'sparse-revlog'] = sparserevlog if sparserevlog: options[b'generaldelta'] = True sidedata = requirementsmod.SIDEDATA_REQUIREMENT in requirements options[b'side-data'] = sidedata maxchainlen = None if sparserevlog: maxchainlen = revlogconst.SPARSE_REVLOG_MAX_CHAIN_LENGTH # experimental config: format.maxchainlen maxchainlen = ui.configint(b'format', b'maxchainlen', maxchainlen) if maxchainlen is not None: options[b'maxchainlen'] = maxchainlen for r in requirements: # we allow multiple compression engine requirement to co-exist because # strickly speaking, revlog seems to support mixed compression style. # # The compression used for new entries will be "the last one" prefix = r.startswith if prefix(b'revlog-compression-') or prefix(b'exp-compression-'): options[b'compengine'] = r.split(b'-', 2)[2] options[b'zlib.level'] = ui.configint(b'storage', b'revlog.zlib.level') if options[b'zlib.level'] is not None: if not (0 <= options[b'zlib.level'] <= 9): msg = _(b'invalid value for `storage.revlog.zlib.level` config: %d') raise error.Abort(msg % options[b'zlib.level']) options[b'zstd.level'] = ui.configint(b'storage', b'revlog.zstd.level') if options[b'zstd.level'] is not None: if not (0 <= options[b'zstd.level'] <= 22): msg = _(b'invalid value for `storage.revlog.zstd.level` config: %d') raise error.Abort(msg % options[b'zstd.level']) if requirementsmod.NARROW_REQUIREMENT in requirements: options[b'enableellipsis'] = True if ui.configbool(b'experimental', b'rust.index'): options[b'rust.index'] = True if requirementsmod.NODEMAP_REQUIREMENT in requirements: options[b'persistent-nodemap'] = True if ui.configbool(b'storage', 
b'revlog.nodemap.mmap'): options[b'persistent-nodemap.mmap'] = True epnm = ui.config(b'storage', b'revlog.nodemap.mode') options[b'persistent-nodemap.mode'] = epnm if ui.configbool(b'devel', b'persistent-nodemap'): options[b'devel-force-nodemap'] = True return options def makemain(**kwargs): """Produce a type conforming to ``ilocalrepositorymain``.""" return localrepository @interfaceutil.implementer(repository.ilocalrepositoryfilestorage) class revlogfilestorage(object): """File storage when using revlogs.""" def file(self, path): if path[0] == b'/': path = path[1:] return filelog.filelog(self.svfs, path) @interfaceutil.implementer(repository.ilocalrepositoryfilestorage) class revlognarrowfilestorage(object): """File storage when using revlogs and narrow files.""" def file(self, path): if path[0] == b'/': path = path[1:] return filelog.narrowfilelog(self.svfs, path, self._storenarrowmatch) def makefilestorage(requirements, features, **kwargs): """Produce a type conforming to ``ilocalrepositoryfilestorage``.""" features.add(repository.REPO_FEATURE_REVLOG_FILE_STORAGE) features.add(repository.REPO_FEATURE_STREAM_CLONE) if requirementsmod.NARROW_REQUIREMENT in requirements: return revlognarrowfilestorage else: return revlogfilestorage # List of repository interfaces and factory functions for them. Each # will be called in order during ``makelocalrepository()`` to iteratively # derive the final type for a local repository instance. We capture the # function as a lambda so we don't hold a reference and the module-level # functions can be wrapped. REPO_INTERFACES = [ (repository.ilocalrepositorymain, lambda: makemain), (repository.ilocalrepositoryfilestorage, lambda: makefilestorage), ] @interfaceutil.implementer(repository.ilocalrepositorymain) class localrepository(object): """Main class for representing local repositories. All local repositories are instances of this class. Constructed on its own, instances of this class are not usable as repository objects. 
To obtain a usable repository object, call ``hg.repository()``, ``localrepo.instance()``, or ``localrepo.makelocalrepository()``. The latter is the lowest-level. ``instance()`` adds support for creating new repositories. ``hg.repository()`` adds more extension integration, including calling ``reposetup()``. Generally speaking, ``hg.repository()`` should be used. """ # obsolete experimental requirements: # - manifestv2: An experimental new manifest format that allowed # for stem compression of long paths. Experiment ended up not # being successful (repository sizes went up due to worse delta # chains), and the code was deleted in 4.6. supportedformats = { b'revlogv1', b'generaldelta', requirementsmod.TREEMANIFEST_REQUIREMENT, requirementsmod.COPIESSDC_REQUIREMENT, requirementsmod.REVLOGV2_REQUIREMENT, requirementsmod.SIDEDATA_REQUIREMENT, requirementsmod.SPARSEREVLOG_REQUIREMENT, requirementsmod.NODEMAP_REQUIREMENT, bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT, requirementsmod.SHARESAFE_REQUIREMENT, } _basesupported = supportedformats | { b'store', b'fncache', requirementsmod.SHARED_REQUIREMENT, requirementsmod.RELATIVE_SHARED_REQUIREMENT, b'dotencode', requirementsmod.SPARSE_REQUIREMENT, requirementsmod.INTERNAL_PHASE_REQUIREMENT, } # list of prefix for file which can be written without 'wlock' # Extensions should extend this list when needed _wlockfreeprefix = { # We migh consider requiring 'wlock' for the next # two, but pretty much all the existing code assume # wlock is not needed so we keep them excluded for # now. b'hgrc', b'requires', # XXX cache is a complicatged business someone # should investigate this in depth at some point b'cache/', # XXX shouldn't be dirstate covered by the wlock? b'dirstate', # XXX bisect was still a bit too messy at the time # this changeset was introduced. 
Someone should fix # the remainig bit and drop this line b'bisect.state', } def __init__( self, baseui, ui, origroot, wdirvfs, hgvfs, requirements, supportedrequirements, sharedpath, store, cachevfs, wcachevfs, features, intents=None, ): """Create a new local repository instance. Most callers should use ``hg.repository()``, ``localrepo.instance()``, or ``localrepo.makelocalrepository()`` for obtaining a new repository object. Arguments: baseui ``ui.ui`` instance that ``ui`` argument was based off of. ui ``ui.ui`` instance for use by the repository. origroot ``bytes`` path to working directory root of this repository. wdirvfs ``vfs.vfs`` rooted at the working directory. hgvfs ``vfs.vfs`` rooted at .hg/ requirements ``set`` of bytestrings representing repository opening requirements. supportedrequirements ``set`` of bytestrings representing repository requirements that we know how to open. May be a supetset of ``requirements``. sharedpath ``bytes`` Defining path to storage base directory. Points to a ``.hg/`` directory somewhere. store ``store.basicstore`` (or derived) instance providing access to versioned storage. cachevfs ``vfs.vfs`` used for cache files. wcachevfs ``vfs.vfs`` used for cache files related to the working copy. features ``set`` of bytestrings defining features/capabilities of this instance. intents ``set`` of system strings indicating what this repo will be used for. """ self.baseui = baseui self.ui = ui self.origroot = origroot # vfs rooted at working directory. self.wvfs = wdirvfs self.root = wdirvfs.base # vfs rooted at .hg/. Used to access most non-store paths. 
self.vfs = hgvfs self.path = hgvfs.base self.requirements = requirements self.supported = supportedrequirements self.sharedpath = sharedpath self.store = store self.cachevfs = cachevfs self.wcachevfs = wcachevfs self.features = features self.filtername = None if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool( b'devel', b'check-locks' ): self.vfs.audit = self._getvfsward(self.vfs.audit) # A list of callback to shape the phase if no data were found. # Callback are in the form: func(repo, roots) --> processed root. # This list it to be filled by extension during repo setup self._phasedefaults = [] color.setup(self.ui) self.spath = self.store.path self.svfs = self.store.vfs self.sjoin = self.store.join if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool( b'devel', b'check-locks' ): if util.safehasattr(self.svfs, b'vfs'): # this is filtervfs self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit) else: # standard vfs self.svfs.audit = self._getsvfsward(self.svfs.audit) self._dirstatevalidatewarned = False self._branchcaches = branchmap.BranchMapCache() self._revbranchcache = None self._filterpats = {} self._datafilters = {} self._transref = self._lockref = self._wlockref = None # A cache for various files under .hg/ that tracks file changes, # (used by the filecache decorator) # # Maps a property name to its util.filecacheentry self._filecache = {} # hold sets of revision to be filtered # should be cleared when something might have changed the filter value: # - new changesets, # - phase change, # - new obsolescence marker, # - working directory parent change, # - bookmark changes self.filteredrevcache = {} # post-dirstate-status hooks self._postdsstatus = [] # generic mapping between names and nodes self.names = namespaces.namespaces() # Key to signature value. self._sparsesignaturecache = {} # Signature to cached matcher instance. 
self._sparsematchercache = {} self._extrafilterid = repoview.extrafilter(ui) self.filecopiesmode = None if requirementsmod.COPIESSDC_REQUIREMENT in self.requirements: self.filecopiesmode = b'changeset-sidedata' def _getvfsward(self, origfunc): """build a ward for self.vfs""" rref = weakref.ref(self) def checkvfs(path, mode=None): ret = origfunc(path, mode=mode) repo = rref() if ( repo is None or not util.safehasattr(repo, b'_wlockref') or not util.safehasattr(repo, b'_lockref') ): return if mode in (None, b'r', b'rb'): return if path.startswith(repo.path): # truncate name relative to the repository (.hg) path = path[len(repo.path) + 1 :] if path.startswith(b'cache/'): msg = b'accessing cache with vfs instead of cachevfs: "%s"' repo.ui.develwarn(msg % path, stacklevel=3, config=b"cache-vfs") # path prefixes covered by 'lock' vfs_path_prefixes = ( b'journal.', b'undo.', b'strip-backup/', b'cache/', ) if any(path.startswith(prefix) for prefix in vfs_path_prefixes): if repo._currentlock(repo._lockref) is None: repo.ui.develwarn( b'write with no lock: "%s"' % path, stacklevel=3, config=b'check-locks', ) elif repo._currentlock(repo._wlockref) is None: # rest of vfs files are covered by 'wlock' # # exclude special files for prefix in self._wlockfreeprefix: if path.startswith(prefix): return repo.ui.develwarn( b'write with no wlock: "%s"' % path, stacklevel=3, config=b'check-locks', ) return ret return checkvfs def _getsvfsward(self, origfunc): """build a ward for self.svfs""" rref = weakref.ref(self) def checksvfs(path, mode=None): ret = origfunc(path, mode=mode) repo = rref() if repo is None or not util.safehasattr(repo, b'_lockref'): return if mode in (None, b'r', b'rb'): return if path.startswith(repo.sharedpath): # truncate name relative to the repository (.hg) path = path[len(repo.sharedpath) + 1 :] if repo._currentlock(repo._lockref) is None: repo.ui.develwarn( b'write with no lock: "%s"' % path, stacklevel=4 ) return ret return checksvfs def close(self): 
        # tail of close(): persist any dirty caches before the repo goes away
        self._writecaches()

    def _writecaches(self):
        # Flush the rev-branch cache to disk if it was ever instantiated.
        if self._revbranchcache:
            self._revbranchcache.write()

    def _restrictcapabilities(self, caps):
        """Filter/extend the capability set advertised for this repository.

        When ``experimental.bundle2-advertise`` is enabled, a ``bundle2=``
        capability encoding this repository's bundle2 capabilities (client
        role) is added to ``caps``. Returns the (possibly copied) set.
        """
        if self.ui.configbool(b'experimental', b'bundle2-advertise'):
            # copy before mutating so the caller's set is left untouched
            caps = set(caps)
            capsblob = bundle2.encodecaps(
                bundle2.getrepocaps(self, role=b'client')
            )
            caps.add(b'bundle2=' + urlreq.quote(capsblob))
        return caps

    # Don't cache auditor/nofsauditor, or you'll end up with reference cycle:
    # self -> auditor -> self._checknested -> self
    @property
    def auditor(self):
        # This is only used by context.workingctx.match in order to
        # detect files in subrepos.
        return pathutil.pathauditor(self.root, callback=self._checknested)

    @property
    def nofsauditor(self):
        # This is only used by context.basectx.match in order to detect
        # files in subrepos.
        return pathutil.pathauditor(
            self.root, callback=self._checknested, realfs=False, cached=True
        )

    def _checknested(self, path):
        """Determine if path is a legal nested repository."""
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1 :]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
ctx = self[None] parts = util.splitpath(subpath) while parts: prefix = b'/'.join(parts) if prefix in ctx.substate: if prefix == normsubpath: return True else: sub = ctx.sub(prefix) return sub.checknested(subpath[len(prefix) + 1 :]) else: parts.pop() return False def peer(self): return localpeer(self) # not cached to avoid reference cycle def unfiltered(self): """Return unfiltered version of the repository Intended to be overwritten by filtered repo.""" return self def filtered(self, name, visibilityexceptions=None): """Return a filtered version of a repository The `name` parameter is the identifier of the requested view. This will return a repoview object set "exactly" to the specified view. This function does not apply recursive filtering to a repository. For example calling `repo.filtered("served")` will return a repoview using the "served" view, regardless of the initial view used by `repo`. In other word, there is always only one level of `repoview` "filtering". """ if self._extrafilterid is not None and b'%' not in name: name = name + b'%' + self._extrafilterid cls = repoview.newtype(self.unfiltered().__class__) return cls(self, name, visibilityexceptions) @mixedrepostorecache( (b'bookmarks', b'plain'), (b'bookmarks.current', b'plain'), (b'bookmarks', b''), (b'00changelog.i', b''), ) def _bookmarks(self): # Since the multiple files involved in the transaction cannot be # written atomically (with current repository format), there is a race # condition here. # # 1) changelog content A is read # 2) outside transaction update changelog to content B # 3) outside transaction update bookmark file referring to content B # 4) bookmarks file content is read and filtered against changelog-A # # When this happens, bookmarks against nodes missing from A are dropped. # # Having this happening during read is not great, but it become worse # when this happen during write because the bookmarks to the "unknown" # nodes will be dropped for good. 
However, writes happen within locks. # This locking makes it possible to have a race free consistent read. # For this purpose data read from disc before locking are # "invalidated" right after the locks are taken. This invalidations are # "light", the `filecache` mechanism keep the data in memory and will # reuse them if the underlying files did not changed. Not parsing the # same data multiple times helps performances. # # Unfortunately in the case describe above, the files tracked by the # bookmarks file cache might not have changed, but the in-memory # content is still "wrong" because we used an older changelog content # to process the on-disk data. So after locking, the changelog would be # refreshed but `_bookmarks` would be preserved. # Adding `00changelog.i` to the list of tracked file is not # enough, because at the time we build the content for `_bookmarks` in # (4), the changelog file has already diverged from the content used # for loading `changelog` in (1) # # To prevent the issue, we force the changelog to be explicitly # reloaded while computing `_bookmarks`. The data race can still happen # without the lock (with a narrower window), but it would no longer go # undetected during the lock time refresh. # # The new schedule is as follow # # 1) filecache logic detect that `_bookmarks` needs to be computed # 2) cachestat for `bookmarks` and `changelog` are captured (for book) # 3) We force `changelog` filecache to be tested # 4) cachestat for `changelog` are captured (for changelog) # 5) `_bookmarks` is computed and cached # # The step in (3) ensure we have a changelog at least as recent as the # cache stat computed in (1). As a result at locking time: # * if the changelog did not changed since (1) -> we can reuse the data # * otherwise -> the bookmarks get refreshed. 
        # tail of _bookmarks: force a changelog refresh (see the scheduling
        # comment above) before parsing the bookmarks file
        self._refreshchangelog()
        return bookmarks.bmstore(self)

    def _refreshchangelog(self):
        """Make sure the in-memory changelog matches the on-disk one."""
        if 'changelog' in vars(self) and self.currenttransaction() is None:
            # Drop the cached changelog so the filecache machinery reloads
            # it from disk; only safe when no transaction is running.
            del self.changelog

    @property
    def _activebookmark(self):
        # Name of the currently active bookmark, or None if none is active.
        return self._bookmarks.active

    # _phasesets depend on changelog. what we need is to call
    # _phasecache.invalidate() if '00changelog.i' was changed, but it
    # can't be easily expressed in filecache mechanism.
    @storecache(b'phaseroots', b'00changelog.i')
    def _phasecache(self):
        # Phase information cache, refreshed when phaseroots or the
        # changelog change on disk.
        return phases.phasecache(self, self._phasedefaults)

    @storecache(b'obsstore')
    def obsstore(self):
        # Obsolescence marker store for this repository.
        return obsolete.makestore(self.ui, self)

    @storecache(b'00changelog.i')
    def changelog(self):
        # load dirstate before changelog to avoid race see issue6303
        self.dirstate.prefetch_parents()
        return self.store.changelog(txnutil.mayhavepending(self.root))

    @storecache(b'00manifest.i')
    def manifestlog(self):
        # Manifest log restricted to the store-level narrow match.
        return self.store.manifestlog(self, self._storenarrowmatch)

    @repofilecache(b'dirstate')
    def dirstate(self):
        # Working directory state, reloaded when .hg/dirstate changes.
        return self._makedirstate()

    def _makedirstate(self):
        """Extension point for wrapping the dirstate per-repo."""
        # lazy so sparse config is consulted at match time, not load time
        sparsematchfn = lambda: sparse.matcher(self)

        return dirstate.dirstate(
            self.vfs, self.ui, self.root, self._dirstatevalidate, sparsematchfn
        )

    def _dirstatevalidate(self, node):
        """Return ``node`` if it is a known changeset, else ``nullid``.

        Warns once per repository instance when the dirstate references an
        unknown working directory parent (damaged dirstate).
        """
        try:
            self.changelog.rev(node)
            return node
        except error.LookupError:
            if not self._dirstatevalidatewarned:
                self._dirstatevalidatewarned = True
                self.ui.warn(
                    _(b"warning: ignoring unknown working parent %s!\n")
                    % short(node)
                )
            return nullid

    @storecache(narrowspec.FILENAME)
    def narrowpats(self):
        """matcher patterns for this repository's narrowspec

        A tuple of (includes, excludes).
""" return narrowspec.load(self) @storecache(narrowspec.FILENAME) def _storenarrowmatch(self): if requirementsmod.NARROW_REQUIREMENT not in self.requirements: return matchmod.always() include, exclude = self.narrowpats return narrowspec.match(self.root, include=include, exclude=exclude) @storecache(narrowspec.FILENAME) def _narrowmatch(self): if requirementsmod.NARROW_REQUIREMENT not in self.requirements: return matchmod.always() narrowspec.checkworkingcopynarrowspec(self) include, exclude = self.narrowpats return narrowspec.match(self.root, include=include, exclude=exclude) def narrowmatch(self, match=None, includeexact=False): """matcher corresponding the the repo's narrowspec If `match` is given, then that will be intersected with the narrow matcher. If `includeexact` is True, then any exact matches from `match` will be included even if they're outside the narrowspec. """ if match: if includeexact and not self._narrowmatch.always(): # do not exclude explicitly-specified paths so that they can # be warned later on em = matchmod.exact(match.files()) nm = matchmod.unionmatcher([self._narrowmatch, em]) return matchmod.intersectmatchers(match, nm) return matchmod.intersectmatchers(match, self._narrowmatch) return self._narrowmatch def setnarrowpats(self, newincludes, newexcludes): narrowspec.save(self, newincludes, newexcludes) self.invalidate(clearfilecache=True) @unfilteredpropertycache def _quick_access_changeid_null(self): return { b'null': (nullrev, nullid), nullrev: (nullrev, nullid), nullid: (nullrev, nullid), } @unfilteredpropertycache def _quick_access_changeid_wc(self): # also fast path access to the working copy parents # however, only do it for filter that ensure wc is visible. 
quick = self._quick_access_changeid_null.copy() cl = self.unfiltered().changelog for node in self.dirstate.parents(): if node == nullid: continue rev = cl.index.get_rev(node) if rev is None: # unknown working copy parent case: # # skip the fast path and let higher code deal with it continue pair = (rev, node) quick[rev] = pair quick[node] = pair # also add the parents of the parents for r in cl.parentrevs(rev): if r == nullrev: continue n = cl.node(r) pair = (r, n) quick[r] = pair quick[n] = pair p1node = self.dirstate.p1() if p1node != nullid: quick[b'.'] = quick[p1node] return quick @unfilteredmethod def _quick_access_changeid_invalidate(self): if '_quick_access_changeid_wc' in vars(self): del self.__dict__['_quick_access_changeid_wc'] @property def _quick_access_changeid(self): """an helper dictionnary for __getitem__ calls This contains a list of symbol we can recognise right away without further processing. """ if self.filtername in repoview.filter_has_wc: return self._quick_access_changeid_wc return self._quick_access_changeid_null def __getitem__(self, changeid): # dealing with special cases if changeid is None: return context.workingctx(self) if isinstance(changeid, context.basectx): return changeid # dealing with multiple revisions if isinstance(changeid, slice): # wdirrev isn't contiguous so the slice shouldn't include it return [ self[i] for i in pycompat.xrange(*changeid.indices(len(self))) if i not in self.changelog.filteredrevs ] # dealing with some special values quick_access = self._quick_access_changeid.get(changeid) if quick_access is not None: rev, node = quick_access return context.changectx(self, rev, node, maybe_filtered=False) if changeid == b'tip': node = self.changelog.tip() rev = self.changelog.rev(node) return context.changectx(self, rev, node) # dealing with arbitrary values try: if isinstance(changeid, int): node = self.changelog.node(changeid) rev = changeid elif changeid == b'.': # this is a hack to delay/avoid loading obsmarkers # 
                # when we know that '.' won't be hidden
                node = self.dirstate.p1()
                rev = self.unfiltered().changelog.rev(node)
            elif len(changeid) == 20:
                # 20 bytes: a binary node id
                try:
                    node = changeid
                    rev = self.changelog.rev(changeid)
                except error.FilteredLookupError:
                    changeid = hex(changeid)  # for the error message
                    raise
                except LookupError:
                    # check if it might have come from damaged dirstate
                    #
                    # XXX we could avoid the unfiltered if we had a recognizable
                    # exception for filtered changeset access
                    if (
                        self.local()
                        and changeid in self.unfiltered().dirstate.parents()
                    ):
                        msg = _(b"working directory has unknown parent '%s'!")
                        raise error.Abort(msg % short(changeid))
                    changeid = hex(changeid)  # for the error message
                    raise
            elif len(changeid) == 40:
                # 40 characters: a full hexadecimal node id
                node = bin(changeid)
                rev = self.changelog.rev(node)
            else:
                raise error.ProgrammingError(
                    b"unsupported changeid '%s' of type %s"
                    % (changeid, pycompat.bytestr(type(changeid)))
                )

            return context.changectx(self, rev, node)
        except (error.FilteredIndexError, error.FilteredLookupError):
            raise error.FilteredRepoLookupError(
                _(b"filtered revision '%s'") % pycompat.bytestr(changeid)
            )
        except (IndexError, LookupError):
            raise error.RepoLookupError(
                _(b"unknown revision '%s'") % pycompat.bytestr(changeid)
            )
        except error.WdirUnsupported:
            return context.workingctx(self)

    def __contains__(self, changeid):
        """True if the given changeid exists

        error.AmbiguousPrefixLookupError is raised if an ambiguous node
        specified.
        """
        try:
            self[changeid]
            return True
        except error.RepoLookupError:
            return False

    def __nonzero__(self):
        # repository objects are always truthy
        return True

    __bool__ = __nonzero__

    def __len__(self):
        # no need to pay the cost of repoview.changelog
        unfi = self.unfiltered()
        return len(unfi.changelog)

    def __iter__(self):
        # iterate over revision numbers (of the current filter level)
        return iter(self.changelog)

    def revs(self, expr, *args):
        '''Find revisions matching a revset.

        The revset is specified as a string ``expr`` that may contain
        %-formatting to escape certain types. See ``revsetlang.formatspec``.

        Revset aliases from the configuration are not expanded.
To expand user aliases, consider calling ``scmutil.revrange()`` or ``repo.anyrevs([expr], user=True)``. Returns a smartset.abstractsmartset, which is a list-like interface that contains integer revisions. ''' tree = revsetlang.spectree(expr, *args) return revset.makematcher(tree)(self) def set(self, expr, *args): '''Find revisions matching a revset and emit changectx instances. This is a convenience wrapper around ``revs()`` that iterates the result and is a generator of changectx instances. Revset aliases from the configuration are not expanded. To expand user aliases, consider calling ``scmutil.revrange()``. ''' for r in self.revs(expr, *args): yield self[r] def anyrevs(self, specs, user=False, localalias=None): '''Find revisions matching one of the given revsets. Revset aliases from the configuration are not expanded by default. To expand user aliases, specify ``user=True``. To provide some local definitions overriding user aliases, set ``localalias`` to ``{name: definitionstring}``. ''' if specs == [b'null']: return revset.baseset([nullrev]) if specs == [b'.']: quick_data = self._quick_access_changeid.get(b'.') if quick_data is not None: return revset.baseset([quick_data[0]]) if user: m = revset.matchany( self.ui, specs, lookup=revset.lookupfn(self), localalias=localalias, ) else: m = revset.matchany(None, specs, localalias=localalias) return m(self) def url(self): return b'file:' + self.root def hook(self, name, throw=False, **args): """Call a hook, passing this repo instance. This a convenience method to aid invoking hooks. Extensions likely won't call this unless they have registered a custom hook or are replacing code that is expected to call a hook. """ return hook.hook(self.ui, self, name, throw, **args) @filteredpropertycache def _tagscache(self): '''Returns a tagscache object that contains various tags related caches.''' # This simplifies its cache management by having one decorated # function (this one) and the rest simply fetch things from it. 
class tagscache(object): def __init__(self): # These two define the set of tags for this repository. tags # maps tag name to node; tagtypes maps tag name to 'global' or # 'local'. (Global tags are defined by .hgtags across all # heads, and local tags are defined in .hg/localtags.) # They constitute the in-memory cache of tags. self.tags = self.tagtypes = None self.nodetagscache = self.tagslist = None cache = tagscache() cache.tags, cache.tagtypes = self._findtags() return cache def tags(self): '''return a mapping of tag to node''' t = {} if self.changelog.filteredrevs: tags, tt = self._findtags() else: tags = self._tagscache.tags rev = self.changelog.rev for k, v in pycompat.iteritems(tags): try: # ignore tags to unknown nodes rev(v) t[k] = v except (error.LookupError, ValueError): pass return t def _findtags(self): '''Do the hard work of finding tags. Return a pair of dicts (tags, tagtypes) where tags maps tag name to node, and tagtypes maps tag name to a string like \'global\' or \'local\'. Subclasses or extensions are free to add their own tags, but should be aware that the returned dicts will be retained for the duration of the localrepo object.''' # XXX what tagtype should subclasses/extensions use? Currently # mq and bookmarks add tags, but do not set the tagtype at all. # Should each extension invent its own tag type? Should there # be one tagtype for all such "virtual" tags? Or is the status # quo fine? # map tag name to (node, hist) alltags = tagsmod.findglobaltags(self.ui, self) # map tag name to tag type tagtypes = {tag: b'global' for tag in alltags} tagsmod.readlocaltags(self.ui, self, alltags, tagtypes) # Build the return dicts. Have to re-encode tag names because # the tags module always uses UTF-8 (in order not to lose info # writing to the cache), but the rest of Mercurial wants them in # local encoding. 
        # tail of _findtags: re-encode tag names to local encoding and drop
        # tags pointing to the null node
        tags = {}
        for (name, (node, hist)) in pycompat.iteritems(alltags):
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        # 'tip' is implicit and always present
        tags[b'tip'] = self.changelog.tip()
        tagtypes = {
            encoding.tolocal(name): value
            for (name, value) in pycompat.iteritems(tagtypes)
        }
        return (tags, tagtypes)

    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        if not self._tagscache.tagslist:
            # build (rev, tag, node) triples so sorting orders by revision
            l = []
            for t, n in pycompat.iteritems(self.tags()):
                l.append((self.changelog.rev(n), t, n))
            self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]

        return self._tagscache.tagslist

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self._tagscache.nodetagscache:
            # invert the tag -> node mapping, lazily, into node -> [tags]
            nodetagscache = {}
            for t, n in pycompat.iteritems(self._tagscache.tags):
                nodetagscache.setdefault(n, []).append(t)
            for tags in pycompat.itervalues(nodetagscache):
                tags.sort()
            self._tagscache.nodetagscache = nodetagscache
        return self._tagscache.nodetagscache.get(node, [])

    def nodebookmarks(self, node):
        """return the list of bookmarks pointing to the specified node"""
        return self._bookmarks.names(node)

    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]} with branchheads
        ordered by increasing revision number'''
        return self._branchcaches[self]

    @unfilteredmethod
    def revbranchcache(self):
        # Lazily create the revision -> branch cache on the unfiltered repo.
        if not self._revbranchcache:
            self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
        return self._revbranchcache

    def branchtip(self, branch, ignoremissing=False):
        '''return the tip node for a given branch

        If ignoremissing is True, then this method will not raise an error.
        This is helpful for callers that only expect None for a missing branch
        (e.g. namespace).
''' try: return self.branchmap().branchtip(branch) except KeyError: if not ignoremissing: raise error.RepoLookupError(_(b"unknown branch '%s'") % branch) else: pass def lookup(self, key): node = scmutil.revsymbol(self, key).node() if node is None: raise error.RepoLookupError(_(b"unknown revision '%s'") % key) return node def lookupbranch(self, key): if self.branchmap().hasbranch(key): return key return scmutil.revsymbol(self, key).branch() def known(self, nodes): cl = self.changelog get_rev = cl.index.get_rev filtered = cl.filteredrevs result = [] for n in nodes: r = get_rev(n) resp = not (r is None or r in filtered) result.append(resp) return result def local(self): return self def publishing(self): # it's safe (and desirable) to trust the publish flag unconditionally # so that we don't finalize changes shared between users via ssh or nfs return self.ui.configbool(b'phases', b'publish', untrusted=True) def cancopy(self): # so statichttprepo's override of local() works if not self.local(): return False if not self.publishing(): return True # if publishing we can't copy if there is filtered content return not self.filtered(b'visible').changelog.filteredrevs def shared(self): '''the type of shared repository (None if not shared)''' if self.sharedpath != self.path: return b'store' return None def wjoin(self, f, *insidef): return self.vfs.reljoin(self.root, f, *insidef) def setparents(self, p1, p2=nullid): self[None].setparents(p1, p2) self._quick_access_changeid_invalidate() def filectx(self, path, changeid=None, fileid=None, changectx=None): """changeid must be a changeset revision, if specified. 
fileid can be a file revision or node.""" return context.filectx( self, path, changeid, fileid, changectx=changectx ) def getcwd(self): return self.dirstate.getcwd() def pathto(self, f, cwd=None): return self.dirstate.pathto(f, cwd) def _loadfilter(self, filter): if filter not in self._filterpats: l = [] for pat, cmd in self.ui.configitems(filter): if cmd == b'!': continue mf = matchmod.match(self.root, b'', [pat]) fn = None params = cmd for name, filterfn in pycompat.iteritems(self._datafilters): if cmd.startswith(name): fn = filterfn params = cmd[len(name) :].lstrip() break if not fn: fn = lambda s, c, **kwargs: procutil.filter(s, c) fn.__name__ = 'commandfilter' # Wrap old filters not supporting keyword arguments if not pycompat.getargspec(fn)[2]: oldfn = fn fn = lambda s, c, oldfn=oldfn, **kwargs: oldfn(s, c) fn.__name__ = 'compat-' + oldfn.__name__ l.append((mf, fn, params)) self._filterpats[filter] = l return self._filterpats[filter] def _filter(self, filterpats, filename, data): for mf, fn, cmd in filterpats: if mf(filename): self.ui.debug( b"filtering %s through %s\n" % (filename, cmd or pycompat.sysbytes(fn.__name__)) ) data = fn(data, cmd, ui=self.ui, repo=self, filename=filename) break return data @unfilteredpropertycache def _encodefilterpats(self): return self._loadfilter(b'encode') @unfilteredpropertycache def _decodefilterpats(self): return self._loadfilter(b'decode') def adddatafilter(self, name, filter): self._datafilters[name] = filter def wread(self, filename): if self.wvfs.islink(filename): data = self.wvfs.readlink(filename) else: data = self.wvfs.read(filename) return self._filter(self._encodefilterpats, filename, data) def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs): """write ``data`` into ``filename`` in the working directory This returns length of written (maybe decoded) data. 
""" data = self._filter(self._decodefilterpats, filename, data) if b'l' in flags: self.wvfs.symlink(data, filename) else: self.wvfs.write( filename, data, backgroundclose=backgroundclose, **kwargs ) if b'x' in flags: self.wvfs.setflags(filename, False, True) else: self.wvfs.setflags(filename, False, False) return len(data) def wwritedata(self, filename, data): return self._filter(self._decodefilterpats, filename, data) def currenttransaction(self): """return the current transaction or None if non exists""" if self._transref: tr = self._transref() else: tr = None if tr and tr.running(): return tr return None def transaction(self, desc, report=None): if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool( b'devel', b'check-locks' ): if self._currentlock(self._lockref) is None: raise error.ProgrammingError(b'transaction requires locking') tr = self.currenttransaction() if tr is not None: return tr.nest(name=desc) # abort here if the journal already exists if self.svfs.exists(b"journal"): raise error.RepoError( _(b"abandoned transaction found"), hint=_(b"run 'hg recover' to clean up transaction"), ) idbase = b"%.40f#%f" % (random.random(), time.time()) ha = hex(hashutil.sha1(idbase).digest()) txnid = b'TXN:' + ha self.hook(b'pretxnopen', throw=True, txnname=desc, txnid=txnid) self._writejournal(desc) renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()] if report: rp = report else: rp = self.ui.warn vfsmap = {b'plain': self.vfs, b'store': self.svfs} # root of .hg/ # we must avoid cyclic reference between repo and transaction. reporef = weakref.ref(self) # Code to track tag movement # # Since tags are all handled as file content, it is actually quite hard # to track these movement from a code perspective. So we fallback to a # tracking at the repository level. 
One could envision to track changes # to the '.hgtags' file through changegroup apply but that fails to # cope with case where transaction expose new heads without changegroup # being involved (eg: phase movement). # # For now, We gate the feature behind a flag since this likely comes # with performance impacts. The current code run more often than needed # and do not use caches as much as it could. The current focus is on # the behavior of the feature so we disable it by default. The flag # will be removed when we are happy with the performance impact. # # Once this feature is no longer experimental move the following # documentation to the appropriate help section: # # The ``HG_TAG_MOVED`` variable will be set if the transaction touched # tags (new or changed or deleted tags). In addition the details of # these changes are made available in a file at: # ``REPOROOT/.hg/changes/tags.changes``. # Make sure you check for HG_TAG_MOVED before reading that file as it # might exist from a previous transaction even if no tag were touched # in this one. Changes are recorded in a line base format:: # # \n # # Actions are defined as follow: # "-R": tag is removed, # "+A": tag is added, # "-M": tag is moved (old value), # "+M": tag is moved (new value), tracktags = lambda x: None # experimental config: experimental.hook-track-tags shouldtracktags = self.ui.configbool( b'experimental', b'hook-track-tags' ) if desc != b'strip' and shouldtracktags: oldheads = self.changelog.headrevs() def tracktags(tr2): repo = reporef() oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads) newheads = repo.changelog.headrevs() newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads) # notes: we compare lists here. 
# As we do it only once buiding set would not be cheaper changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes) if changes: tr2.hookargs[b'tag_moved'] = b'1' with repo.vfs( b'changes/tags.changes', b'w', atomictemp=True ) as changesfile: # note: we do not register the file to the transaction # because we needs it to still exist on the transaction # is close (for txnclose hooks) tagsmod.writediff(changesfile, changes) def validate(tr2): """will run pre-closing hooks""" # XXX the transaction API is a bit lacking here so we take a hacky # path for now # # We cannot add this as a "pending" hooks since the 'tr.hookargs' # dict is copied before these run. In addition we needs the data # available to in memory hooks too. # # Moreover, we also need to make sure this runs before txnclose # hooks and there is no "pending" mechanism that would execute # logic only if hooks are about to run. # # Fixing this limitation of the transaction is also needed to track # other families of changes (bookmarks, phases, obsolescence). # # This will have to be fixed before we remove the experimental # gating. 
tracktags(tr2) repo = reporef() singleheadopt = (b'experimental', b'single-head-per-branch') singlehead = repo.ui.configbool(*singleheadopt) if singlehead: singleheadsub = repo.ui.configsuboptions(*singleheadopt)[1] accountclosed = singleheadsub.get( b"account-closed-heads", False ) scmutil.enforcesinglehead(repo, tr2, desc, accountclosed) if hook.hashook(repo.ui, b'pretxnclose-bookmark'): for name, (old, new) in sorted( tr.changes[b'bookmarks'].items() ): args = tr.hookargs.copy() args.update(bookmarks.preparehookargs(name, old, new)) repo.hook( b'pretxnclose-bookmark', throw=True, **pycompat.strkwargs(args) ) if hook.hashook(repo.ui, b'pretxnclose-phase'): cl = repo.unfiltered().changelog for revs, (old, new) in tr.changes[b'phases']: for rev in revs: args = tr.hookargs.copy() node = hex(cl.node(rev)) args.update(phases.preparehookargs(node, old, new)) repo.hook( b'pretxnclose-phase', throw=True, **pycompat.strkwargs(args) ) repo.hook( b'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs) ) def releasefn(tr, success): repo = reporef() if repo is None: # If the repo has been GC'd (and this release function is being # called from transaction.__del__), there's not much we can do, # so just leave the unfinished transaction there and let the # user run `hg recover`. 
return if success: # this should be explicitly invoked here, because # in-memory changes aren't written out at closing # transaction, if tr.addfilegenerator (via # dirstate.write or so) isn't invoked while # transaction running repo.dirstate.write(None) else: # discard all changes (including ones already written # out) in this transaction narrowspec.restorebackup(self, b'journal.narrowspec') narrowspec.restorewcbackup(self, b'journal.narrowspec.dirstate') repo.dirstate.restorebackup(None, b'journal.dirstate') repo.invalidate(clearfilecache=True) tr = transaction.transaction( rp, self.svfs, vfsmap, b"journal", b"undo", aftertrans(renames), self.store.createmode, validator=validate, releasefn=releasefn, checkambigfiles=_cachedfiles, name=desc, ) tr.changes[b'origrepolen'] = len(self) tr.changes[b'obsmarkers'] = set() tr.changes[b'phases'] = [] tr.changes[b'bookmarks'] = {} tr.hookargs[b'txnid'] = txnid tr.hookargs[b'txnname'] = desc tr.hookargs[b'changes'] = tr.changes # note: writing the fncache only during finalize mean that the file is # outdated when running hooks. As fncache is used for streaming clone, # this is not expected to break anything that happen during the hooks. tr.addfinalize(b'flush-fncache', self.store.write) def txnclosehook(tr2): """To be run if transaction is successful, will schedule a hook run """ # Don't reference tr2 in hook() so we don't hold a reference. # This reduces memory consumption when there are multiple # transactions per lock. This can likely go away if issue5045 # fixes the function accumulation. 
hookargs = tr2.hookargs def hookfunc(unused_success): repo = reporef() if hook.hashook(repo.ui, b'txnclose-bookmark'): bmchanges = sorted(tr.changes[b'bookmarks'].items()) for name, (old, new) in bmchanges: args = tr.hookargs.copy() args.update(bookmarks.preparehookargs(name, old, new)) repo.hook( b'txnclose-bookmark', throw=False, **pycompat.strkwargs(args) ) if hook.hashook(repo.ui, b'txnclose-phase'): cl = repo.unfiltered().changelog phasemv = sorted( tr.changes[b'phases'], key=lambda r: r[0][0] ) for revs, (old, new) in phasemv: for rev in revs: args = tr.hookargs.copy() node = hex(cl.node(rev)) args.update(phases.preparehookargs(node, old, new)) repo.hook( b'txnclose-phase', throw=False, **pycompat.strkwargs(args) ) repo.hook( b'txnclose', throw=False, **pycompat.strkwargs(hookargs) ) reporef()._afterlock(hookfunc) tr.addfinalize(b'txnclose-hook', txnclosehook) # Include a leading "-" to make it happen before the transaction summary # reports registered via scmutil.registersummarycallback() whose names # are 00-txnreport etc. That way, the caches will be warm when the # callbacks run. tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr)) def txnaborthook(tr2): """To be run if transaction is aborted """ reporef().hook( b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs) ) tr.addabort(b'txnabort-hook', txnaborthook) # avoid eager cache invalidation. in-memory data should be identical # to stored data if transaction has no error. 
tr.addpostclose(b'refresh-filecachestats', self._refreshfilecachestats) self._transref = weakref.ref(tr) scmutil.registersummarycallback(self, tr, desc) return tr def _journalfiles(self): return ( (self.svfs, b'journal'), (self.svfs, b'journal.narrowspec'), (self.vfs, b'journal.narrowspec.dirstate'), (self.vfs, b'journal.dirstate'), (self.vfs, b'journal.branch'), (self.vfs, b'journal.desc'), (bookmarks.bookmarksvfs(self), b'journal.bookmarks'), (self.svfs, b'journal.phaseroots'), ) def undofiles(self): return [(vfs, undoname(x)) for vfs, x in self._journalfiles()] @unfilteredmethod def _writejournal(self, desc): self.dirstate.savebackup(None, b'journal.dirstate') narrowspec.savewcbackup(self, b'journal.narrowspec.dirstate') narrowspec.savebackup(self, b'journal.narrowspec') self.vfs.write( b"journal.branch", encoding.fromlocal(self.dirstate.branch()) ) self.vfs.write(b"journal.desc", b"%d\n%s\n" % (len(self), desc)) bookmarksvfs = bookmarks.bookmarksvfs(self) bookmarksvfs.write( b"journal.bookmarks", bookmarksvfs.tryread(b"bookmarks") ) self.svfs.write(b"journal.phaseroots", self.svfs.tryread(b"phaseroots")) def recover(self): with self.lock(): if self.svfs.exists(b"journal"): self.ui.status(_(b"rolling back interrupted transaction\n")) vfsmap = { b'': self.svfs, b'plain': self.vfs, } transaction.rollback( self.svfs, vfsmap, b"journal", self.ui.warn, checkambigfiles=_cachedfiles, ) self.invalidate() return True else: self.ui.warn(_(b"no interrupted transaction available\n")) return False def rollback(self, dryrun=False, force=False): wlock = lock = dsguard = None try: wlock = self.wlock() lock = self.lock() if self.svfs.exists(b"undo"): dsguard = dirstateguard.dirstateguard(self, b'rollback') return self._rollback(dryrun, force, dsguard) else: self.ui.warn(_(b"no rollback information available\n")) return 1 finally: release(dsguard, lock, wlock) @unfilteredmethod # Until we get smarter cache management def _rollback(self, dryrun, force, dsguard): ui = self.ui try: 
args = self.vfs.read(b'undo.desc').splitlines() (oldlen, desc, detail) = (int(args[0]), args[1], None) if len(args) >= 3: detail = args[2] oldtip = oldlen - 1 if detail and ui.verbose: msg = _( b'repository tip rolled back to revision %d' b' (undo %s: %s)\n' ) % (oldtip, desc, detail) else: msg = _( b'repository tip rolled back to revision %d (undo %s)\n' ) % (oldtip, desc) except IOError: msg = _(b'rolling back unknown transaction\n') desc = None if not force and self[b'.'] != self[b'tip'] and desc == b'commit': raise error.Abort( _( b'rollback of last commit while not checked out ' b'may lose data' ), hint=_(b'use -f to force'), ) ui.status(msg) if dryrun: return 0 parents = self.dirstate.parents() self.destroying() vfsmap = {b'plain': self.vfs, b'': self.svfs} transaction.rollback( self.svfs, vfsmap, b'undo', ui.warn, checkambigfiles=_cachedfiles ) bookmarksvfs = bookmarks.bookmarksvfs(self) if bookmarksvfs.exists(b'undo.bookmarks'): bookmarksvfs.rename( b'undo.bookmarks', b'bookmarks', checkambig=True ) if self.svfs.exists(b'undo.phaseroots'): self.svfs.rename(b'undo.phaseroots', b'phaseroots', checkambig=True) self.invalidate() has_node = self.changelog.index.has_node parentgone = any(not has_node(p) for p in parents) if parentgone: # prevent dirstateguard from overwriting already restored one dsguard.close() narrowspec.restorebackup(self, b'undo.narrowspec') narrowspec.restorewcbackup(self, b'undo.narrowspec.dirstate') self.dirstate.restorebackup(None, b'undo.dirstate') try: branch = self.vfs.read(b'undo.branch') self.dirstate.setbranch(encoding.tolocal(branch)) except IOError: ui.warn( _( b'named branch could not be reset: ' b'current branch is still \'%s\'\n' ) % self.dirstate.branch() ) parents = tuple([p.rev() for p in self[None].parents()]) if len(parents) > 1: ui.status( _( b'working directory now based on ' b'revisions %d and %d\n' ) % parents ) else: ui.status( _(b'working directory now based on revision %d\n') % parents ) 
mergestatemod.mergestate.clean(self) # TODO: if we know which new heads may result from this rollback, pass # them to destroy(), which will prevent the branchhead cache from being # invalidated. self.destroyed() return 0 def _buildcacheupdater(self, newtransaction): """called during transaction to build the callback updating cache Lives on the repository to help extension who might want to augment this logic. For this purpose, the created transaction is passed to the method. """ # we must avoid cyclic reference between repo and transaction. reporef = weakref.ref(self) def updater(tr): repo = reporef() repo.updatecaches(tr) return updater @unfilteredmethod def updatecaches(self, tr=None, full=False): """warm appropriate caches If this function is called after a transaction closed. The transaction will be available in the 'tr' argument. This can be used to selectively update caches relevant to the changes in that transaction. If 'full' is set, make sure all caches the function knows about have up-to-date data. Even the ones usually loaded more lazily. """ if tr is not None and tr.hookargs.get(b'source') == b'strip': # During strip, many caches are invalid but # later call to `destroyed` will refresh them. 
return if tr is None or tr.changes[b'origrepolen'] < len(self): # accessing the 'ser ved' branchmap should refresh all the others, self.ui.debug(b'updating the branch cache\n') self.filtered(b'served').branchmap() self.filtered(b'served.hidden').branchmap() if full: unfi = self.unfiltered() self.changelog.update_caches(transaction=tr) self.manifestlog.update_caches(transaction=tr) rbc = unfi.revbranchcache() for r in unfi.changelog: rbc.branchinfo(r) rbc.write() # ensure the working copy parents are in the manifestfulltextcache for ctx in self[b'.'].parents(): ctx.manifest() # accessing the manifest is enough # accessing fnode cache warms the cache tagsmod.fnoderevs(self.ui, unfi, unfi.changelog.revs()) # accessing tags warm the cache self.tags() self.filtered(b'served').tags() # The `full` arg is documented as updating even the lazily-loaded # caches immediately, so we're forcing a write to cause these caches # to be warmed up even if they haven't explicitly been requested # yet (if they've never been used by hg, they won't ever have been # written, even if they're a subset of another kind of cache that # *has* been used). for filt in repoview.filtertable.keys(): filtered = self.filtered(filt) filtered.branchmap().write(filtered) def invalidatecaches(self): if '_tagscache' in vars(self): # can't use delattr on proxy del self.__dict__['_tagscache'] self._branchcaches.clear() self.invalidatevolatilesets() self._sparsesignaturecache.clear() def invalidatevolatilesets(self): self.filteredrevcache.clear() obsolete.clearobscaches(self) self._quick_access_changeid_invalidate() def invalidatedirstate(self): '''Invalidates the dirstate, causing the next call to dirstate to check if it was modified since the last time it was read, rereading it if it has. This is different to dirstate.invalidate() that it doesn't always rereads the dirstate. Use dirstate.invalidate() if you want to explicitly read the dirstate again (i.e. 
restoring it to a previous known good state).''' if hasunfilteredcache(self, 'dirstate'): for k in self.dirstate._filecache: try: delattr(self.dirstate, k) except AttributeError: pass delattr(self.unfiltered(), 'dirstate') def invalidate(self, clearfilecache=False): '''Invalidates both store and non-store parts other than dirstate If a transaction is running, invalidation of store is omitted, because discarding in-memory changes might cause inconsistency (e.g. incomplete fncache causes unintentional failure, but redundant one doesn't). ''' unfiltered = self.unfiltered() # all file caches are stored unfiltered for k in list(self._filecache.keys()): # dirstate is invalidated separately in invalidatedirstate() if k == b'dirstate': continue if ( k == b'changelog' and self.currenttransaction() and self.changelog._delayed ): # The changelog object may store unwritten revisions. We don't # want to lose them. # TODO: Solve the problem instead of working around it. continue if clearfilecache: del self._filecache[k] try: delattr(unfiltered, k) except AttributeError: pass self.invalidatecaches() if not self.currenttransaction(): # TODO: Changing contents of store outside transaction # causes inconsistency. We should make in-memory store # changes detectable, and abort if changed. 
self.store.invalidatecaches()

    def invalidateall(self):
        '''Fully invalidates both store and non-store parts, causing the
        subsequent operation to reread any outside changes.'''
        # extension should hook this to invalidate its caches
        self.invalidate()
        self.invalidatedirstate()

    @unfilteredmethod
    def _refreshfilecachestats(self, tr):
        """Reload stats of cached files so that they are flagged as valid"""
        for k, ce in self._filecache.items():
            k = pycompat.sysstr(k)
            # dirstate is refreshed separately; skip entries not currently
            # materialized on the unfiltered repo.
            if k == 'dirstate' or k not in self.__dict__:
                continue
            ce.refresh()

    def _lock(
        self, vfs, lockname, wait, releasefn, acquirefn, desc,
    ):
        # Acquire the lock file ``lockname`` in ``vfs``.  When ``wait`` is
        # true, blocking behavior is governed by the ui.timeout and
        # ui.timeout.warn config values; a timeout of 0 means fail fast.
        timeout = 0
        warntimeout = 0
        if wait:
            timeout = self.ui.configint(b"ui", b"timeout")
            warntimeout = self.ui.configint(b"ui", b"timeout.warn")
        # internal config: ui.signal-safe-lock
        signalsafe = self.ui.configbool(b'ui', b'signal-safe-lock')

        l = lockmod.trylock(
            self.ui,
            vfs,
            lockname,
            timeout,
            warntimeout,
            releasefn=releasefn,
            acquirefn=acquirefn,
            desc=desc,
            signalsafe=signalsafe,
        )
        return l

    def _afterlock(self, callback):
        """add a callback to be run when the repository is fully unlocked

        The callback will be executed when the outermost lock is released
        (with wlock being higher level than 'lock')."""
        # Attach to the outermost held lock (wlock outranks lock) so the
        # callback runs exactly once, after everything is released.
        for ref in (self._wlockref, self._lockref):
            l = ref and ref()
            if l and l.held:
                l.postrelease.append(callback)
                break
        else:
            # no lock have been found.
            callback(True)

    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)
If both 'lock' and 'wlock' must be acquired, ensure you always acquires 'wlock' first to avoid a dead-lock hazard.''' l = self._currentlock(self._lockref) if l is not None: l.lock() return l l = self._lock( vfs=self.svfs, lockname=b"lock", wait=wait, releasefn=None, acquirefn=self.invalidate, desc=_(b'repository %s') % self.origroot, ) self._lockref = weakref.ref(l) return l def wlock(self, wait=True): '''Lock the non-store parts of the repository (everything under .hg except .hg/store) and return a weak reference to the lock. Use this before modifying files in .hg. If both 'lock' and 'wlock' must be acquired, ensure you always acquires 'wlock' first to avoid a dead-lock hazard.''' l = self._wlockref and self._wlockref() if l is not None and l.held: l.lock() return l # We do not need to check for non-waiting lock acquisition. Such # acquisition would not cause dead-lock as they would just fail. if wait and ( self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(b'devel', b'check-locks') ): if self._currentlock(self._lockref) is not None: self.ui.develwarn(b'"wlock" acquired after "lock"') def unlock(): if self.dirstate.pendingparentchange(): self.dirstate.invalidate() else: self.dirstate.write(None) self._filecache[b'dirstate'].refresh() l = self._lock( self.vfs, b"wlock", wait, unlock, self.invalidatedirstate, _(b'working directory of %s') % self.origroot, ) self._wlockref = weakref.ref(l) return l def _currentlock(self, lockref): """Returns the lock if it's held, or None if it's not.""" if lockref is None: return None l = lockref() if l is None or not l.held: return None return l def currentwlock(self): """Returns the wlock if it's held, or None if it's not.""" return self._currentlock(self._wlockref) def checkcommitpatterns(self, wctx, match, status, fail): """check for commit arguments that aren't committable""" if match.isexact() or match.prefix(): matched = set(status.modified + status.added + status.removed) for f in match.files(): f = 
self.dirstate.normalize(f) if f == b'.' or f in matched or f in wctx.substate: continue if f in status.deleted: fail(f, _(b'file not found!')) # Is it a directory that exists or used to exist? if self.wvfs.isdir(f) or wctx.p1().hasdir(f): d = f + b'/' for mf in matched: if mf.startswith(d): break else: fail(f, _(b"no match under directory!")) elif f not in self.dirstate: fail(f, _(b"file not tracked!")) @unfilteredmethod def commit( self, text=b"", user=None, date=None, match=None, force=False, editor=None, extra=None, ): """Add a new revision to current repository. Revision information is gathered from the working directory, match can be used to filter the committed files. If editor is supplied, it is called to get a commit message. """ if extra is None: extra = {} def fail(f, msg): raise error.Abort(b'%s: %s' % (f, msg)) if not match: match = matchmod.always() if not force: match.bad = fail # lock() for recent changelog (see issue4368) with self.wlock(), self.lock(): wctx = self[None] merge = len(wctx.parents()) > 1 if not force and merge and not match.always(): raise error.Abort( _( b'cannot partially commit a merge ' b'(do not specify files or patterns)' ) ) status = self.status(match=match, clean=force) if force: status.modified.extend( status.clean ) # mq may commit clean files # check subrepos subs, commitsubs, newstate = subrepoutil.precommit( self.ui, wctx, status, match, force=force ) # make sure all explicit patterns are matched if not force: self.checkcommitpatterns(wctx, match, status, fail) cctx = context.workingcommitctx( self, status, text, user, date, extra ) ms = mergestatemod.mergestate.read(self) mergeutil.checkunresolved(ms) # internal config: ui.allowemptycommit if cctx.isempty() and not self.ui.configbool( b'ui', b'allowemptycommit' ): self.ui.debug(b'nothing to commit, clearing merge state\n') ms.reset() return None if merge and cctx.deleted(): raise error.Abort(_(b"cannot commit merge with missing files")) if editor: cctx._text = 
editor(self, cctx, subs) edited = text != cctx._text # Save commit message in case this transaction gets rolled back # (e.g. by a pretxncommit hook). Leave the content alone on # the assumption that the user will use the same editor again. msgfn = self.savecommitmessage(cctx._text) # commit subs and write new state if subs: uipathfn = scmutil.getuipathfn(self) for s in sorted(commitsubs): sub = wctx.sub(s) self.ui.status( _(b'committing subrepository %s\n') % uipathfn(subrepoutil.subrelpath(sub)) ) sr = sub.commit(cctx._text, user, date) newstate[s] = (newstate[s][0], sr) subrepoutil.writestate(self, newstate) p1, p2 = self.dirstate.parents() hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or b'') try: self.hook( b"precommit", throw=True, parent1=hookp1, parent2=hookp2 ) with self.transaction(b'commit'): ret = self.commitctx(cctx, True) # update bookmarks, dirstate and mergestate bookmarks.update(self, [p1, p2], ret) cctx.markcommitted(ret) ms.reset() except: # re-raises if edited: self.ui.write( _(b'note: commit message saved in %s\n') % msgfn ) self.ui.write( _( b"note: use 'hg commit --logfile " b".hg/last-message.txt --edit' to reuse it\n" ) ) raise def commithook(unused_success): # hack for command that use a temporary commit (eg: histedit) # temporary commit got stripped before hook release if self.changelog.hasnode(ret): self.hook( b"commit", node=hex(ret), parent1=hookp1, parent2=hookp2 ) self._afterlock(commithook) return ret @unfilteredmethod def commitctx(self, ctx, error=False, origctx=None): return commit.commitctx(self, ctx, error=error, origctx=origctx) @unfilteredmethod def destroying(self): '''Inform the repository that nodes are about to be destroyed. Intended for use by strip and rollback, so there's a common place for anything that has to be done before destroying history. This is mostly useful for saving state that is in memory and waiting to be flushed when the current lock is released. 
Because a call to destroyed is imminent, the repo will be invalidated causing those changes to stay in memory (waiting for the next unlock), or vanish completely. ''' # When using the same lock to commit and strip, the phasecache is left # dirty after committing. Then when we strip, the repo is invalidated, # causing those changes to disappear. if '_phasecache' in vars(self): self._phasecache.write() @unfilteredmethod def destroyed(self): '''Inform the repository that nodes have been destroyed. Intended for use by strip and rollback, so there's a common place for anything that has to be done after destroying history. ''' # When one tries to: # 1) destroy nodes thus calling this method (e.g. strip) # 2) use phasecache somewhere (e.g. commit) # # then 2) will fail because the phasecache contains nodes that were # removed. We can either remove phasecache from the filecache, # causing it to reload next time it is accessed, or simply filter # the removed nodes now and write the updated cache. self._phasecache.filterunknown(self) self._phasecache.write() # refresh all repository caches self.updatecaches() # Ensure the persistent tag cache is updated. Doing it now # means that the tag cache only has to worry about destroyed # heads immediately after a strip/rollback. That in turn # guarantees that "cachetip == currenttip" (comparing both rev # and node) always means no nodes have been added or destroyed. # XXX this is suboptimal when qrefresh'ing: we strip the current # head, refresh the tag cache, then immediately add a new head. # But I think doing it this way is necessary for the "instant # tag cache retrieval" case to work. 
self.invalidate() def status( self, node1=b'.', node2=None, match=None, ignored=False, clean=False, unknown=False, listsubrepos=False, ): '''a convenience method that calls node1.status(node2)''' return self[node1].status( node2, match, ignored, clean, unknown, listsubrepos ) def addpostdsstatus(self, ps): """Add a callback to run within the wlock, at the point at which status fixups happen. On status completion, callback(wctx, status) will be called with the wlock held, unless the dirstate has changed from underneath or the wlock couldn't be grabbed. Callbacks should not capture and use a cached copy of the dirstate -- it might change in the meanwhile. Instead, they should access the dirstate via wctx.repo().dirstate. This list is emptied out after each status run -- extensions should make sure it adds to this list each time dirstate.status is called. Extensions should also make sure they don't call this for statuses that don't involve the dirstate. """ # The list is located here for uniqueness reasons -- it is actually # managed by the workingctx, but that isn't unique per-repo. self._postdsstatus.append(ps) def postdsstatus(self): """Used by workingctx to get the list of post-dirstate-status hooks.""" return self._postdsstatus def clearpostdsstatus(self): """Used by workingctx to clear post-dirstate-status hooks.""" del self._postdsstatus[:] def heads(self, start=None): if start is None: cl = self.changelog headrevs = reversed(cl.headrevs()) return [cl.node(rev) for rev in headrevs] heads = self.changelog.heads(start) # sort the output in rev descending order return sorted(heads, key=self.changelog.rev, reverse=True) def branchheads(self, branch=None, start=None, closed=False): '''return a (possibly filtered) list of heads for the given branch Heads are returned in topological order, from newest to oldest. If branch is None, use the dirstate branch. If start is not None, return only heads reachable from start. 
If closed is True, return heads that are marked as closed as well. ''' if branch is None: branch = self[None].branch() branches = self.branchmap() if not branches.hasbranch(branch): return [] # the cache returns heads ordered lowest to highest bheads = list(reversed(branches.branchheads(branch, closed=closed))) if start is not None: # filter out the heads that cannot be reached from startrev fbheads = set(self.changelog.nodesbetween([start], bheads)[2]) bheads = [h for h in bheads if h in fbheads] return bheads def branches(self, nodes): if not nodes: nodes = [self.changelog.tip()] b = [] for n in nodes: t = n while True: p = self.changelog.parents(n) if p[1] != nullid or p[0] == nullid: b.append((t, n, p[0], p[1])) break n = p[0] return b def between(self, pairs): r = [] for top, bottom in pairs: n, l, i = top, [], 0 f = 1 while n != bottom and n != nullid: p = self.changelog.parents(n)[0] if i == f: l.append(n) f = f * 2 n = p i += 1 r.append(l) return r def checkpush(self, pushop): """Extensions can override this function if additional checks have to be performed before pushing, or call it if they override push command. """ @unfilteredpropertycache def prepushoutgoinghooks(self): """Return util.hooks consists of a pushop with repo, remote, outgoing methods, which are called before pushing changesets. 
""" return util.hooks() def pushkey(self, namespace, key, old, new): try: tr = self.currenttransaction() hookargs = {} if tr is not None: hookargs.update(tr.hookargs) hookargs = pycompat.strkwargs(hookargs) hookargs['namespace'] = namespace hookargs['key'] = key hookargs['old'] = old hookargs['new'] = new self.hook(b'prepushkey', throw=True, **hookargs) except error.HookAbort as exc: self.ui.write_err(_(b"pushkey-abort: %s\n") % exc) if exc.hint: self.ui.write_err(_(b"(%s)\n") % exc.hint) return False self.ui.debug(b'pushing key for "%s:%s"\n' % (namespace, key)) ret = pushkey.push(self, namespace, key, old, new) def runhook(unused_success): self.hook( b'pushkey', namespace=namespace, key=key, old=old, new=new, ret=ret, ) self._afterlock(runhook) return ret def listkeys(self, namespace): self.hook(b'prelistkeys', throw=True, namespace=namespace) self.ui.debug(b'listing keys for "%s"\n' % namespace) values = pushkey.list(self, namespace) self.hook(b'listkeys', namespace=namespace, values=values) return values def debugwireargs(self, one, two, three=None, four=None, five=None): '''used to test argument passing over the wire''' return b"%s %s %s %s %s" % ( one, two, pycompat.bytestr(three), pycompat.bytestr(four), pycompat.bytestr(five), ) def savecommitmessage(self, text): fp = self.vfs(b'last-message.txt', b'wb') try: fp.write(text) finally: fp.close() return self.pathto(fp.name[len(self.root) + 1 :]) # used to avoid circular references so destructors work def aftertrans(files): renamefiles = [tuple(t) for t in files] def a(): for vfs, src, dest in renamefiles: # if src and dest refer to a same file, vfs.rename is a no-op, # leaving both src and dest on disk. delete dest to make sure # the rename couldn't be such a no-op. 
vfs.tryunlink(dest)
            try:
                vfs.rename(src, dest)
            except OSError:  # journal file does not yet exist
                pass

    return a


def undoname(fn):
    """Map a journal file name to its undo counterpart.

    E.g. ``journal.dirstate`` becomes ``undo.dirstate``; only the first
    occurrence of ``journal`` in the basename is replaced.
    """
    base, name = os.path.split(fn)
    assert name.startswith(b'journal')
    return os.path.join(base, name.replace(b'journal', b'undo', 1))


def instance(ui, path, create, intents=None, createopts=None):
    """Obtain a local repository object at ``path``, creating it first
    when ``create`` is true."""
    localpath = util.urllocalpath(path)
    if create:
        createrepository(ui, localpath, createopts=createopts)

    return makelocalrepository(ui, localpath, intents=intents)


def islocal(path):
    # This module only ever serves local repositories.
    return True


def defaultcreateopts(ui, createopts=None):
    """Populate the default creation options for a repository.

    A dictionary of explicitly requested creation options can be passed
    in. Missing keys will be populated.
    """
    createopts = dict(createopts or {})

    if b'backend' not in createopts:
        # experimental config: storage.new-repo-backend
        createopts[b'backend'] = ui.config(b'storage', b'new-repo-backend')

    return createopts


def newreporequirements(ui, createopts):
    """Determine the set of requirements for a new local repository.

    Extensions can wrap this function to specify custom requirements for
    new repositories.
    """
    # If the repo is being created from a shared repository, we copy
    # its requirements.
    if b'sharedrepo' in createopts:
        requirements = set(createopts[b'sharedrepo'].requirements)
        if createopts.get(b'sharedrelative'):
            requirements.add(requirementsmod.RELATIVE_SHARED_REQUIREMENT)
        else:
            requirements.add(requirementsmod.SHARED_REQUIREMENT)

        return requirements

    if b'backend' not in createopts:
        raise error.ProgrammingError(
            b'backend key not present in createopts; '
            b'was defaultcreateopts() called?'
) if createopts[b'backend'] != b'revlogv1': raise error.Abort( _( b'unable to determine repository requirements for ' b'storage backend: %s' ) % createopts[b'backend'] ) requirements = {b'revlogv1'} if ui.configbool(b'format', b'usestore'): requirements.add(b'store') if ui.configbool(b'format', b'usefncache'): requirements.add(b'fncache') if ui.configbool(b'format', b'dotencode'): requirements.add(b'dotencode') compengines = ui.configlist(b'format', b'revlog-compression') for compengine in compengines: if compengine in util.compengines: break else: raise error.Abort( _( b'compression engines %s defined by ' b'format.revlog-compression not available' ) % b', '.join(b'"%s"' % e for e in compengines), hint=_( b'run "hg debuginstall" to list available ' b'compression engines' ), ) # zlib is the historical default and doesn't need an explicit requirement. if compengine == b'zstd': requirements.add(b'revlog-compression-zstd') elif compengine != b'zlib': requirements.add(b'exp-compression-%s' % compengine) if scmutil.gdinitconfig(ui): requirements.add(b'generaldelta') if ui.configbool(b'format', b'sparse-revlog'): requirements.add(requirementsmod.SPARSEREVLOG_REQUIREMENT) # experimental config: format.exp-use-side-data if ui.configbool(b'format', b'exp-use-side-data'): requirements.add(requirementsmod.SIDEDATA_REQUIREMENT) # experimental config: format.exp-use-copies-side-data-changeset if ui.configbool(b'format', b'exp-use-copies-side-data-changeset'): requirements.add(requirementsmod.SIDEDATA_REQUIREMENT) requirements.add(requirementsmod.COPIESSDC_REQUIREMENT) if ui.configbool(b'experimental', b'treemanifest'): requirements.add(requirementsmod.TREEMANIFEST_REQUIREMENT) revlogv2 = ui.config(b'experimental', b'revlogv2') if revlogv2 == b'enable-unstable-format-and-corrupt-my-data': requirements.remove(b'revlogv1') # generaldelta is implied by revlogv2. 
requirements.discard(b'generaldelta')
        requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
    # experimental config: format.internal-phase
    if ui.configbool(b'format', b'internal-phase'):
        requirements.add(requirementsmod.INTERNAL_PHASE_REQUIREMENT)

    if createopts.get(b'narrowfiles'):
        requirements.add(requirementsmod.NARROW_REQUIREMENT)

    if createopts.get(b'lfs'):
        requirements.add(b'lfs')

    if ui.configbool(b'format', b'bookmarks-in-store'):
        requirements.add(bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT)

    if ui.configbool(b'format', b'use-persistent-nodemap'):
        requirements.add(requirementsmod.NODEMAP_REQUIREMENT)

    # if share-safe is enabled, let's create the new repository with the new
    # requirement
    if ui.configbool(b'format', b'exp-share-safe'):
        requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)

    return requirements


def checkrequirementscompat(ui, requirements):
    """Checks compatibility of repository requirements enabled and disabled.

    Returns a set of requirements which need to be dropped because dependent
    requirements are not enabled.
Also warns users about it """ dropped = set() if b'store' not in requirements: if bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT in requirements: ui.warn( _( b'ignoring enabled \'format.bookmarks-in-store\' config ' b'beacuse it is incompatible with disabled ' b'\'format.usestore\' config\n' ) ) dropped.add(bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT) if ( requirementsmod.SHARED_REQUIREMENT in requirements or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements ): raise error.Abort( _( b"cannot create shared repository as source was created" b" with 'format.usestore' config disabled" ) ) if requirementsmod.SHARESAFE_REQUIREMENT in requirements: ui.warn( _( b"ignoring enabled 'format.exp-share-safe' config because " b"it is incompatible with disabled 'format.usestore'" b" config\n" ) ) dropped.add(requirementsmod.SHARESAFE_REQUIREMENT) return dropped def filterknowncreateopts(ui, createopts): """Filters a dict of repo creation options against options that are known. Receives a dict of repo creation options and returns a dict of those options that we don't know how to handle. This function is called as part of repository creation. If the returned dict contains any items, repository creation will not be allowed, as it means there was a request to create a repository with options not recognized by loaded code. Extensions can wrap this function to filter out creation options they know how to handle. """ known = { b'backend', b'lfs', b'narrowfiles', b'sharedrepo', b'sharedrelative', b'shareditems', b'shallowfilestore', } return {k: v for k, v in createopts.items() if k not in known} def createrepository(ui, path, createopts=None): """Create a new repository in a vfs. ``path`` path to the new repo's working directory. ``createopts`` options for the new repository. The following keys for ``createopts`` are recognized: backend The storage backend to use. lfs Repository will be created with ``lfs`` requirement. 
The lfs extension will automatically be loaded when the repository is accessed. narrowfiles Set up repository to support narrow file storage. sharedrepo Repository object from which storage should be shared. sharedrelative Boolean indicating if the path to the shared repo should be stored as relative. By default, the pointer to the "parent" repo is stored as an absolute path. shareditems Set of items to share to the new repository (in addition to storage). shallowfilestore Indicates that storage for files should be shallow (not all ancestor revisions are known). """ createopts = defaultcreateopts(ui, createopts=createopts) unknownopts = filterknowncreateopts(ui, createopts) if not isinstance(unknownopts, dict): raise error.ProgrammingError( b'filterknowncreateopts() did not return a dict' ) if unknownopts: raise error.Abort( _( b'unable to create repository because of unknown ' b'creation option: %s' ) % b', '.join(sorted(unknownopts)), hint=_(b'is a required extension not loaded?'), ) requirements = newreporequirements(ui, createopts=createopts) requirements -= checkrequirementscompat(ui, requirements) wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True) hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg')) if hgvfs.exists(): raise error.RepoError(_(b'repository %s already exists') % path) if b'sharedrepo' in createopts: sharedpath = createopts[b'sharedrepo'].sharedpath if createopts.get(b'sharedrelative'): try: sharedpath = os.path.relpath(sharedpath, hgvfs.base) except (IOError, ValueError) as e: # ValueError is raised on Windows if the drive letters differ # on each path. 
raise error.Abort(
                    _(b'cannot calculate relative path'),
                    hint=stringutil.forcebytestr(e),
                )

    if not wdirvfs.exists():
        wdirvfs.makedirs()

    hgvfs.makedir(notindexed=True)
    # 'cache' is skipped for shared repos (presumably reused from the share
    # source — TODO confirm), while 'wcache' is always created alongside
    # this working copy.
    if b'sharedrepo' not in createopts:
        hgvfs.mkdir(b'cache')
    hgvfs.mkdir(b'wcache')

    if b'store' in requirements and b'sharedrepo' not in createopts:
        hgvfs.mkdir(b'store')

        # We create an invalid changelog outside the store so very old
        # Mercurial versions (which didn't know about the requirements
        # file) encounter an error on reading the changelog. This
        # effectively locks out old clients and prevents them from
        # mucking with a repo in an unknown format.
        #
        # The revlog header has version 2, which won't be recognized by
        # such old clients.
        hgvfs.append(
            b'00changelog.i',
            b'\0\0\0\2 dummy changelog to prevent using the old repo '
            b'layout',
        )

    # Filter the requirements into working copy and store ones
    wcreq, storereq = scmutil.filterrequirements(requirements)
    # write working copy ones
    scmutil.writerequires(hgvfs, wcreq)
    # If there are store requirements and the current repository
    # is not a shared one, write stored requirements
    # For new shared repository, we don't need to write the store
    # requirements as they are already present in store requires
    if storereq and b'sharedrepo' not in createopts:
        storevfs = vfsmod.vfs(hgvfs.join(b'store'), cacheaudited=True)
        scmutil.writerequires(storevfs, storereq)

    # Write out file telling readers where to find the shared store.
    if b'sharedrepo' in createopts:
        hgvfs.write(b'sharedpath', sharedpath)

    if createopts.get(b'shareditems'):
        shared = b'\n'.join(sorted(createopts[b'shareditems'])) + b'\n'
        hgvfs.write(b'shared', shared)


def poisonrepository(repo):
    """Poison a repository instance so it can no longer be used."""
    # Perform any cleanup on the instance.
    repo.close()

    # Our strategy is to replace the type of the object with one that
    # has all attribute lookups result in error.
# # But we have to allow the close() method because some constructors # of repos call close() on repo references. class poisonedrepository(object): def __getattribute__(self, item): if item == 'close': return object.__getattribute__(self, item) raise error.ProgrammingError( b'repo instances should not be used after unshare' ) def close(self): pass # We may have a repoview, which intercepts __setattr__. So be sure # we operate at the lowest level possible. object.__setattr__(repo, '__class__', poisonedrepository) diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -1,437 +1,437 @@ Show all commands except debug commands $ hg debugcomplete abort add addremove annotate archive backout bisect bookmarks branch branches bundle cat clone commit config continue copy diff export files forget graft grep heads help identify import incoming init locate log manifest merge outgoing parents paths phase pull push recover remove rename resolve revert rollback root serve shelve status summary tag tags tip unbundle unshelve update verify version Show all commands that start with "a" $ hg debugcomplete a abort add addremove annotate archive Do not show debug commands if there are other candidates $ hg debugcomplete d diff Show debug commands if there are no other candidates $ hg debugcomplete debug debugancestor debugantivirusrunning debugapplystreamclonebundle debugbackupbundle debugbuilddag debugbundle debugcapabilities debugchangedfiles debugcheckstate debugcolor debugcommands debugcomplete debugconfig debugcreatestreamclonebundle debugdag debugdata debugdate debugdeltachain debugdirstate debugdiscovery debugdownload debugextensions debugfileset debugformat debugfsinfo debuggetbundle debugignore debugindex debugindexdot debugindexstats debuginstall debugknown debuglabelcomplete debuglocks debugmanifestfulltextcache debugmergestate debugnamecomplete debugnodemap debugobsolete debugp1copies debugp2copies 
debugpathcomplete debugpathcopies debugpeer debugpickmergetool debugpushkey debugpvec debugrebuilddirstate debugrebuildfncache debugrename debugrequires debugrevlog debugrevlogindex debugrevspec debugserve debugsetparents debugsidedata debugssl debugsub debugsuccessorssets debugtagscache debugtemplate debuguigetpass debuguiprompt debugupdatecaches debugupgraderepo debugwalk debugwhyunstable debugwireargs debugwireproto Do not show the alias of a debug command if there are other candidates (this should hide rawcommit) $ hg debugcomplete r recover remove rename resolve revert rollback root Show the alias of a debug command if there are no other candidates $ hg debugcomplete rawc Show the global options $ hg debugcomplete --options | sort --color --config --cwd --debug --debugger --encoding --encodingmode --help --hidden --noninteractive --pager --profile --quiet --repository --time --traceback --verbose --version -R -h -q -v -y Show the options for the "serve" command $ hg debugcomplete --options serve | sort --accesslog --address --certificate --cmdserver --color --config --cwd --daemon --daemon-postexec --debug --debugger --encoding --encodingmode --errorlog --help --hidden --ipv6 --name --noninteractive --pager --pid-file --port --prefix --print-url --profile --quiet --repository --stdio --style --subrepos --templates --time --traceback --verbose --version --web-conf -6 -A -E -R -S -a -d -h -n -p -q -t -v -y Show an error if we use --options with an ambiguous abbreviation $ hg debugcomplete --options s hg: command 's' is ambiguous: serve shelve showconfig status summary [255] Show all commands + options $ hg debugcommands abort: dry-run add: include, exclude, subrepos, dry-run addremove: similarity, subrepos, include, exclude, dry-run annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template archive: no-decode, prefix, 
rev, type, subrepos, include, exclude backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user bisect: reset, good, bad, skip, extend, command, noupdate bookmarks: force, rev, delete, rename, inactive, list, template branch: force, clean, rev branches: active, closed, rev, template bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure cat: output, rev, decode, include, exclude, template clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos - config: untrusted, edit, local, shared, global, template + config: untrusted, edit, local, shared, non-shared, global, template continue: dry-run copy: forget, after, at-rev, force, include, exclude, dry-run debugancestor: debugantivirusrunning: debugapplystreamclonebundle: debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template debugbuilddag: mergeable-file, overwritten-file, new-file debugbundle: all, part-type, spec debugcapabilities: debugchangedfiles: debugcheckstate: debugcolor: style debugcommands: debugcomplete: options debugcreatestreamclonebundle: debugdag: tags, branches, dots, spaces debugdata: changelog, manifest, dir debugdate: extended debugdeltachain: changelog, manifest, dir, template debugdirstate: nodates, dates, datesort debugdiscovery: old, nonheads, rev, seed, ssh, remotecmd, insecure debugdownload: output debugextensions: template debugfileset: rev, all-files, show-matcher, show-stage debugformat: template debugfsinfo: debuggetbundle: head, common, type debugignore: debugindex: changelog, manifest, dir, template debugindexdot: changelog, manifest, dir debugindexstats: debuginstall: template debugknown: debuglabelcomplete: debuglocks: force-lock, force-wlock, set-lock, set-wlock debugmanifestfulltextcache: clear, add 
debugmergestate: style, template debugnamecomplete: debugnodemap: dump-new, dump-disk, check, metadata debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template debugp1copies: rev debugp2copies: rev debugpathcomplete: full, normal, added, removed debugpathcopies: include, exclude debugpeer: debugpickmergetool: rev, changedelete, include, exclude, tool debugpushkey: debugpvec: debugrebuilddirstate: rev, minimal debugrebuildfncache: debugrename: rev debugrequires: debugrevlog: changelog, manifest, dir, dump debugrevlogindex: changelog, manifest, dir, format debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized debugserve: sshstdio, logiofd, logiofile debugsetparents: debugsidedata: changelog, manifest, dir debugssl: debugsub: rev debugsuccessorssets: closest debugtagscache: debugtemplate: rev, define debuguigetpass: prompt debuguiprompt: prompt debugupdatecaches: debugupgraderepo: optimize, run, backup, changelog, manifest debugwalk: include, exclude debugwhyunstable: debugwireargs: three, four, five, ssh, remotecmd, insecure debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template files: rev, print0, include, exclude, template, subrepos forget: interactive, include, exclude, dry-run graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude heads: rev, topo, active, closed, style, template help: extension, command, keyword, system identify: rev, num, id, branch, tags, bookmarks, ssh, 
remotecmd, insecure, template import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos init: ssh, remotecmd, insecure locate: rev, print0, fullpath, include, exclude log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude manifest: rev, all, template merge: force, rev, preview, abort, tool outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos parents: rev, style, template paths: template phase: public, draft, secret, force, rev pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure push: force, rev, bookmark, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure recover: verify remove: after, force, subrepos, include, exclude, dry-run rename: after, at-rev, force, include, exclude, dry-run resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template revert: all, date, rev, no-backup, interactive, include, exclude, dry-run rollback: dry-run, force root: template serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template summary: remote tag: force, local, rev, remove, edit, message, date, user tags: template tip: patch, git, 
style, template unbundle: update unshelve: abort, continue, interactive, keep, name, tool, date update: clean, check, merge, date, rev, tool verify: full version: template $ hg init a $ cd a $ echo fee > fee $ hg ci -q -Amfee $ hg tag fee $ mkdir fie $ echo dead > fie/dead $ echo live > fie/live $ hg bookmark fo $ hg branch -q fie $ hg ci -q -Amfie $ echo fo > fo $ hg branch -qf default $ hg ci -q -Amfo $ echo Fum > Fum $ hg ci -q -AmFum $ hg bookmark Fum Test debugpathcomplete $ hg debugpathcomplete f fee fie fo $ hg debugpathcomplete -f f fee fie/dead fie/live fo $ hg rm Fum $ hg debugpathcomplete -r F Fum Test debugnamecomplete $ hg debugnamecomplete Fum default fee fie fo tip $ hg debugnamecomplete f fee fie fo Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still used for completions in some shells. $ hg debuglabelcomplete Fum default fee fie fo tip $ hg debuglabelcomplete f fee fie fo diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -1,275 +1,294 @@ setup $ cat >> $HGRCPATH < [extensions] > share = > [format] > exp-share-safe = True > EOF prepare source repo $ hg init source $ cd source $ cat .hg/requires exp-sharesafe $ cat .hg/store/requires dotencode fncache generaldelta revlogv1 sparserevlog store $ hg debugrequirements dotencode exp-sharesafe fncache generaldelta revlogv1 sparserevlog store $ echo a > a $ hg ci -Aqm "added a" $ echo b > b $ hg ci -Aqm "added b" $ HGEDITOR=cat hg config --shared abort: repository is not shared; can't use --shared [255] $ cd .. 
Create a shared repo and check the requirements are shared and read correctly $ hg share source shared1 updating working directory 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd shared1 $ cat .hg/requires exp-sharesafe shared $ hg debugrequirements -R ../source dotencode exp-sharesafe fncache generaldelta revlogv1 sparserevlog store $ hg debugrequirements dotencode exp-sharesafe fncache generaldelta revlogv1 shared sparserevlog store $ echo c > c $ hg ci -Aqm "added c" Check that config of the source repository is also loaded $ hg showconfig ui.curses [1] $ echo "[ui]" >> ../source/.hg/hgrc $ echo "curses=true" >> ../source/.hg/hgrc $ hg showconfig ui.curses true Test that extensions of source repository are also loaded $ hg debugextensions share $ hg extdiff -p echo hg: unknown command 'extdiff' 'extdiff' is provided by the following extension: extdiff command to allow external programs to compare revisions (use 'hg help extensions' for information on enabling extensions) [255] $ echo "[extensions]" >> ../source/.hg/hgrc $ echo "extdiff=" >> ../source/.hg/hgrc $ hg debugextensions -R ../source extdiff share $ hg extdiff -R ../source -p echo BROKEN: the command below will not work if config of shared source is not loaded on dispatch but debugextensions says that extension is loaded $ hg debugextensions extdiff share $ hg extdiff -p echo However, local .hg/hgrc should override the config set by share source $ echo "[ui]" >> .hg/hgrc $ echo "curses=false" >> .hg/hgrc $ hg showconfig ui.curses false $ HGEDITOR=cat hg config --shared [ui] curses=true [extensions] extdiff= $ HGEDITOR=cat hg config --local [ui] curses=false Testing that hooks set in source repository also runs in shared repo $ cd ../source $ cat <> .hg/hgrc > [extensions] > hooklib= > [hooks] > pretxnchangegroup.reject_merge_commits = \ > python:hgext.hooklib.reject_merge_commits.hook > EOF $ cd .. 
$ hg clone source cloned updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd cloned $ hg up 0 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo bar > bar $ hg ci -Aqm "added bar" $ hg merge 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "merge commit" $ hg push ../source pushing to ../source searching for changes adding changesets adding manifests adding file changes error: pretxnchangegroup.reject_merge_commits hook failed: bcde3522682d rejected as merge on the same branch. Please consider rebase. transaction abort! rollback completed abort: bcde3522682d rejected as merge on the same branch. Please consider rebase. [255] $ hg push ../shared1 pushing to ../shared1 searching for changes adding changesets adding manifests adding file changes error: pretxnchangegroup.reject_merge_commits hook failed: bcde3522682d rejected as merge on the same branch. Please consider rebase. transaction abort! rollback completed abort: bcde3522682d rejected as merge on the same branch. Please consider rebase. 
[255] Test that if share source config is untrusted, we dont read it $ cd ../shared1 $ cat << EOF > $TESTTMP/untrusted.py > from mercurial import scmutil, util > def uisetup(ui): > class untrustedui(ui.__class__): > def _trusted(self, fp, f): > if util.normpath(fp.name).endswith(b'source/.hg/hgrc'): > return False > return super(untrustedui, self)._trusted(fp, f) > ui.__class__ = untrustedui > EOF $ hg showconfig hooks hooks.pretxnchangegroup.reject_merge_commits=python:hgext.hooklib.reject_merge_commits.hook $ hg showconfig hooks --config extensions.untrusted=$TESTTMP/untrusted.py [1] Update the source repository format and check that shared repo works $ cd ../source Disable zstd related tests because its not present on pure version #if zstd $ echo "[format]" >> .hg/hgrc $ echo "revlog-compression=zstd" >> .hg/hgrc $ hg debugupgraderepo --run -q upgrade will perform the following actions: requirements preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlogv1, sparserevlog, store added: revlog-compression-zstd $ hg log -r . changeset: 1:5f6d8a4bf34a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: added b #endif $ echo "[format]" >> .hg/hgrc $ echo "use-persistent-nodemap=True" >> .hg/hgrc $ hg debugupgraderepo --run -q -R ../shared1 abort: cannot upgrade repository; unsupported source requirement: shared [255] $ hg debugupgraderepo --run -q upgrade will perform the following actions: requirements preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) added: persistent-nodemap $ hg log -r . changeset: 1:5f6d8a4bf34a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: added b Shared one should work $ cd ../shared1 $ hg log -r . 
changeset: 2:155349b645be tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: added c + +Testing that nonsharedrc is loaded for source and not shared + + $ cd ../source + $ touch .hg/hgrc-not-shared + $ echo "[ui]" >> .hg/hgrc-not-shared + $ echo "traceback=true" >> .hg/hgrc-not-shared + + $ hg showconfig ui.traceback + true + + $ HGEDITOR=cat hg config --non-shared + [ui] + traceback=true + + $ cd ../shared1 + $ hg showconfig ui.traceback + [1] + Unsharing works $ hg unshare Test that source config is added to the shared one after unshare, and the config of current repo is still respected over the config which came from source config $ cd ../cloned $ hg push ../shared1 pushing to ../shared1 searching for changes adding changesets adding manifests adding file changes error: pretxnchangegroup.reject_merge_commits hook failed: bcde3522682d rejected as merge on the same branch. Please consider rebase. transaction abort! rollback completed abort: bcde3522682d rejected as merge on the same branch. Please consider rebase. [255] $ hg showconfig ui.curses -R ../shared1 false