These used to be marked with no-op parens, but black removes those now
and this is more explicit.

- skip-blame: fallout from mass reformatting

Reviewers: indygreg, hg-reviewers
Lint Skipped
Unit Tests Skipped

Change | Path
---|---
M | contrib/debugshell.py (2 lines)
M | contrib/perf.py (12 lines)
M | doc/check-seclevel.py (12 lines)
M | hgext/convert/cvsps.py (6 lines)
M | hgext/hgk.py (8 lines)
M | hgext/keyword.py (2 lines)
M | hgext/phabricator.py (2 lines)
M | hgext/remotefilelog/debugcommands.py (4 lines)
M | hgext/show.py (4 lines)
M | hgext/win32mbcs.py (2 lines)
M | mercurial/bundle2.py (2 lines)
M | mercurial/debugcommands.py (206 lines)
M | mercurial/dispatch.py (2 lines)
M | mercurial/sslutil.py (4 lines)
M | tests/blackbox-readonly-dispatch.py (4 lines)
M | tests/test-ui-color.py (4 lines)

Commit | Parents | Author | Summary | Date
---|---|---|---|---
eca56bdc1247 | d526675d24ec | Augie Fackler | | Oct 6 2019, 10:51 AM
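
The convention being replaced can be sketched like this (a minimal illustration, not code from the patch; `FakeUI` is a hypothetical stand-in for Mercurial's real `ui` object, and `writenoi18n` is assumed here to behave as a plain alias of `write`):

```python
# Sketch of the convention change: how "deliberately untranslated" output is marked.

class FakeUI(object):
    """Hypothetical stand-in with the two methods this patch distinguishes."""

    def write(self, *args):
        # Output that would normally be wrapped in _() for translation.
        print(b"".join(args).decode())

    # Assumed to mirror the real change: same behaviour, but the name alone
    # documents "this string is intentionally not translated".
    writenoi18n = write


ui = FakeUI()

# Old convention: redundant parentheses told check-code the literal was
# deliberately not wrapped in _(); black now strips such no-op parens,
# losing the marker.
ui.write((b"obsolete\n"))

# New convention from this change: the method name carries the marker, so a
# reformatter cannot erase it.
ui.writenoi18n(b"obsolete\n")
```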

contrib/debugshell.py

 else:
 debugger = pycompat.sysstr(debugger)
 # if IPython doesn't exist, fallback to code.interact
 try:
 with demandimport.deactivated():
 __import__(pdbmap[debugger])
 except ImportError:
-ui.warn(
+ui.warnnoi18n(
 b"%s debugger specified but %s module was not found\n"
 % (debugger, pdbmap[debugger])
 )
 debugger = b'pdb'
 getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)

contrib/perf.py

 path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
 if not path:
 raise error.Abort(
 b'default repository not configured!',
 hint=b"see 'hg help config.paths'",
 )
 dest = path.pushloc or path.loc
-ui.status(b'analysing phase of %s\n' % util.hidepassword(dest))
+ui.statusnoi18n(b'analysing phase of %s\n' % util.hidepassword(dest))
 other = hg.peer(repo, opts, dest)
 # easier to perform discovery through the operation
 op = exchange.pushoperation(repo, other)
 exchange._pushdiscoverychangeset(op)
 remotesubset = op.fallbackheads
 with other.commandexecutor() as e:
 remotephases = e.callcommand(
 b'listkeys', {b'namespace': b'phases'}
 ).result()
 del other
 publishing = remotephases.get(b'publishing', False)
 if publishing:
-ui.status(b'publishing: yes\n')
+ui.statusnoi18n(b'publishing: yes\n')
 else:
-ui.status(b'publishing: no\n')
+ui.statusnoi18n(b'publishing: no\n')
 nodemap = repo.changelog.nodemap
 nonpublishroots = 0
 for nhex, phase in remotephases.iteritems():
 if nhex == b'publishing': # ignore data related to publish option
 continue
 node = bin(nhex)
 if node in nodemap and int(phase):
 nonpublishroots += 1
-ui.status(b'number of roots: %d\n' % len(remotephases))
+ui.statusnoi18n(b'number of roots: %d\n' % len(remotephases))
-ui.status(b'number of known non public roots: %d\n' % nonpublishroots)
+ui.statusnoi18n(b'number of known non public roots: %d\n' % nonpublishroots)
 def d():
 phases.remotephasessummary(repo, remotesubset, remotephases)
 timer(d)
 fm.end()
 """microbenchmark ui.write
 """
 opts = _byteskwargs(opts)
 timer, fm = gettimer(ui, opts)
 def write():
 for i in range(100000):
-ui.write(b'Testing write performance\n')
+ui.writenoi18n(b'Testing write performance\n')
 timer(write)
 fm.end()
 def uisetup(ui):
 if util.safehasattr(cmdutil, b'openrevlog') and not util.safehasattr(
 commands, b'debugrevlogopts'

doc/check-seclevel.py

 def showavailables(ui, initlevel):
 avail = ' available marks and order of them in this help: %s\n' % (
 ', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1 :]])
 )
 ui.warn(avail.encode('utf-8'))
 def checkseclevel(ui, doc, name, initlevel):
-ui.note('checking "%s"\n' % name)
+ui.notenoi18n('checking "%s"\n' % name)
 if not isinstance(doc, bytes):
 doc = doc.encode('utf-8')
 blocks, pruned = minirst.parse(doc, 0, ['verbose'])
 errorcnt = 0
 curlevel = initlevel
 for block in blocks:
 if block[b'type'] != b'section':
 continue
 mark = block[b'underline']
 title = block[b'lines'][0]
 if (mark not in mark2level) or (mark2level[mark] <= initlevel):
 ui.warn(
 (
 'invalid section mark %r for "%s" of %s\n'
 % (mark * 4, title, name)
 ).encode('utf-8')
 )
 showavailables(ui, initlevel)
 errorcnt += 1
 continue
 nextlevel = mark2level[mark]
 if curlevel < nextlevel and curlevel + 1 != nextlevel:
-ui.warn('gap of section level at "%s" of %s\n' % (title, name))
+ui.warnnoi18n('gap of section level at "%s" of %s\n' % (title, name))
 showavailables(ui, initlevel)
 errorcnt += 1
 continue
-ui.note(
+ui.notenoi18n(
 'appropriate section level for "%s %s"\n'
 % (mark * (nextlevel * 2), title)
 )
 curlevel = nextlevel
 return errorcnt
 def checkcmdtable(ui, cmdtable, namefmt, initlevel):
 errorcnt = 0
 for k, entry in cmdtable.items():
 name = k.split(b"|")[0].lstrip(b"^")
 if not entry[0].__doc__:
-ui.note('skip checking %s: no help document\n' % (namefmt % name))
+ui.notenoi18n('skip checking %s: no help document\n' % (namefmt % name))
 continue
 errorcnt += checkseclevel(
 ui, entry[0].__doc__, namefmt % name, initlevel
 )
 return errorcnt
 def checkhghelps(ui):
 errorcnt = 0
 for h in helptable:
 names, sec, doc = h[0:3]
 if callable(doc):
 doc = doc(ui)
 errorcnt += checkseclevel(
 ui, doc, '%s help topic' % names[0], initlevel_topic
 )
 errorcnt += checkcmdtable(ui, table, '%s command', initlevel_cmd)
 for name in sorted(
 list(extensions.enabled()) + list(extensions.disabled())
 ):
 mod = extensions.load(ui, name, None)
 if not mod.__doc__:
-ui.note('skip checking %s extension: no help document\n' % name)
+ui.notenoi18n('skip checking %s extension: no help document\n' % name)
 continue
 errorcnt += checkseclevel(
 ui, mod.__doc__, '%s extension' % name, initlevel_ext
 )
 cmdtable = getattr(mod, 'cmdtable', None)
 if cmdtable:
 errorcnt += checkcmdtable(
 ui,
 cmdtable,
 '%%s command of %s extension' % name,
 initlevel_ext_cmd,
 )
 return errorcnt
 def checkfile(ui, filename, initlevel):
 if filename == '-':
 filename = 'stdin'
 doc = sys.stdin.read()
 else:
 with open(filename) as fp:
 doc = fp.read()
-ui.note(
+ui.notenoi18n(
 'checking input from %s with initlevel %d\n' % (filename, initlevel)
 )
 return checkseclevel(ui, doc, 'input from %s' % filename, initlevel)
 def main():
 optparser = optparse.OptionParser(
 """%prog [options]

hgext/convert/cvsps.py

 b'Tag%s: %s \n'
 % (
 [b'', b's'][len(cs.tags) > 1],
 b','.join(cs.tags) or b'(none)',
 )
 )
 )
 if cs.branchpoints:
-ui.write(
+ui.writenoi18n(
 b'Branchpoints: %s \n' % b', '.join(sorted(cs.branchpoints))
 )
 if opts[b"parents"] and cs.parents:
 if len(cs.parents) > 1:
 ui.write(
 (
 b'Parents: %s\n'
 % (b','.join([(b"%d" % p.id) for p in cs.parents]))
 )
 )
 else:
 ui.write((b'Parent: %d\n' % cs.parents[0].id))
 if opts[b"ancestors"]:
 b = cs.branch
 r = []
 while b:
 b, c = ancestors[b]
 r.append(b'%s:%d:%d' % (b or b"HEAD", c, branches[b]))
 if r:
 ui.write((b'Ancestors: %s\n' % (b','.join(r))))
-ui.write(b'Log:\n')
+ui.writenoi18n(b'Log:\n')
 ui.write(b'%s\n\n' % cs.comment)
-ui.write(b'Members: \n')
+ui.writenoi18n(b'Members: \n')
 for f in cs.entries:
 fn = f.file
 if fn.startswith(opts[b"prefix"]):
 fn = fn[len(opts[b"prefix"]) :]
 ui.write(
 b'\t%s:%s->%s%s \n'
 % (
 fn,

hgext/hgk.py

 mmap = repo[node1].manifest()
 mmap2 = repo[node2].manifest()
 m = scmutil.match(repo[node1], files)
 modified, added, removed = repo.status(node1, node2, m)[:3]
 empty = short(nullid)
 for f in modified:
 # TODO get file permissions
-ui.write(
+ui.writenoi18n(
 b":100664 100664 %s %s M\t%s\t%s\n"
 % (short(mmap[f]), short(mmap2[f]), f, f)
 )
 for f in added:
-ui.write(
+ui.writenoi18n(
 b":000000 100664 %s %s N\t%s\t%s\n"
 % (empty, short(mmap2[f]), f, f)
 )
 for f in removed:
-ui.write(
+ui.writenoi18n(
 b":100664 000000 %s %s D\t%s\t%s\n"
 % (short(mmap[f]), empty, f, f)
 )
 ##
 while True:
 if opts[r'stdin']:
 if b'committer' in ctx.extra():
 ui.write((b"committer %s\n" % ctx.extra()[b'committer']))
 ui.write((b"revision %d\n" % ctx.rev()))
 ui.write((b"branch %s\n" % ctx.branch()))
 if obsolete.isenabled(repo, obsolete.createmarkersopt):
 if ctx.obsolete():
-ui.write(b"obsolete\n")
+ui.writenoi18n(b"obsolete\n")
 ui.write((b"phase %s\n\n" % ctx.phasestr()))
 if prefix != b"":
 ui.write(
 b"%s%s\n" % (prefix, description.replace(b'\n', nlprefix).strip())
 )
 else:
 ui.write(description + b"\n")

hgext/keyword.py

 ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
 if uikwmaps:
 kwmaps = dict(uikwmaps)
 else:
 kwmaps = _defaultkwmaps(ui)
 uisetup(ui)
 reposetup(ui, repo)
-ui.write(b'[extensions]\nkeyword =\n')
+ui.writenoi18n(b'[extensions]\nkeyword =\n')
 demoitems(b'keyword', ui.configitems(b'keyword'))
 demoitems(b'keywordset', ui.configitems(b'keywordset'))
 demoitems(b'keywordmaps', kwmaps.iteritems())
 keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
 repo.wvfs.write(fn, keywords)
 repo[None].add([fn])
 ui.note(_(b'\nkeywords written to %s:\n') % fn)
 ui.note(keywords)

hgext/phabricator.py

 mapping[old.node()] = [newnode]
 # Update diff property
 # If it fails just warn and keep going, otherwise the DREV
 # associations will be lost
 try:
 writediffproperties(unfi[newnode], diffmap[old.node()])
 except util.urlerr.urlerror:
-ui.warn(b'Failed to update metadata for D%s\n' % drevid)
+ui.warnnoi18n(b'Failed to update metadata for D%s\n' % drevid)
 # Remove local tags since it's no longer necessary
 tagname = b'D%d' % drevid
 if tagname in repo.tags():
 tags.tag(
 repo,
 tagname,
 nullid,
 message=None,

hgext/remotefilelog/debugcommands.py

 def debugindexdot(orig, ui, repo, file_):
 """dump an index DAG as a graphviz dot file"""
 if not shallowutil.isenabled(repo):
 return orig(ui, repo, file_)
 r = buildtemprevlog(repo, os.path.basename(file_)[:-2])
-ui.write(b"digraph G {\n")
+ui.writenoi18n(b"digraph G {\n")
 for i in r:
 node = r.node(i)
 pp = r.parents(node)
 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
 if pp[1] != nullid:
 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
 ui.write(b"}\n")
 if not totaldeltasize or not totalblobsize:
 return
 difference = totalblobsize - totaldeltasize
 deltastr = b"%0.1f%% %s" % (
 (100.0 * abs(difference) / totalblobsize),
 (b"smaller" if difference > 0 else b"bigger"),
 )
-ui.write(
+ui.writenoi18n(
 b"Total:%s%s %s (%s)\n"
 % (
 b"".ljust(2 * hashlen - len(b"Total:")),
 (b'%d' % totaldeltasize).ljust(12),
 (b'%d' % totalblobsize).ljust(9),
 deltastr,
 )
 )

hgext/show.py

 for i, rev in enumerate(sortedheads):
 ctx = repo[rev]
 if i:
 ui.write(b': ')
 else:
 ui.write(b' ')
-ui.write(b'o ')
+ui.writenoi18n(b'o ')
 displayer.show(ctx, nodelen=nodelen)
 displayer.flush(ctx)
 ui.write(b'\n')
 if i:
 ui.write(b':/')
 else:
 ui.write(b' /')
 # Vertically and horizontally separate stack base from parent
 # to reinforce stack boundary.
 if newheads:
 ui.write(b':/ ')
 else:
 ui.write(b' / ')
 ui.write(_(b'(stack base)'), b'\n', label=b'stack.label')
-ui.write(b'o ')
+ui.writenoi18n(b'o ')
 displayer.show(basectx, nodelen=nodelen)
 displayer.flush(basectx)
 ui.write(b'\n')
 @revsetpredicate(b'_underway([commitage[, headage]])')
 def underwayrevset(repo, subset, x):
wrapname(b"mercurial.windows.listdir", wrapperforlistdir) | wrapname(b"mercurial.windows.listdir", wrapperforlistdir) | ||||
# wrap functions to be called with local byte string arguments | # wrap functions to be called with local byte string arguments | ||||
for f in rfuncs.split(): | for f in rfuncs.split(): | ||||
wrapname(f, reversewrapper) | wrapname(f, reversewrapper) | ||||
# Check sys.args manually instead of using ui.debug() because | # Check sys.args manually instead of using ui.debug() because | ||||
# command line options is not yet applied when | # command line options is not yet applied when | ||||
# extensions.loadall() is called. | # extensions.loadall() is called. | ||||
if b'--debug' in sys.argv: | if b'--debug' in sys.argv: | ||||
ui.write(b"[win32mbcs] activated with encoding: %s\n" % _encoding) | ui.writenoi18n(b"[win32mbcs] activated with encoding: %s\n" % _encoding) |

mercurial/bundle2.py

 @parthandler(b'obsmarkers')
 def handleobsmarker(op, inpart):
 """add a stream of obsmarkers to the repo"""
 tr = op.gettransaction()
 markerdata = inpart.read()
 if op.ui.config(b'experimental', b'obsmarkers-exchange-debug'):
-op.ui.write(
+op.ui.writenoi18n(
 b'obsmarker-exchange: %i bytes received\n' % len(markerdata)
 )
 # The mergemarkers call will crash if marker creation is not enabled.
 # we want to avoid this if the part is advisory.
 if not inpart.mandatory and op.repo.obsstore.readonly:
 op.repo.ui.debug(
 b'ignoring obsolescence markers, feature not enabled\n'
 )

mercurial/debugcommands.py

 if tags:
 repo.vfs.write(b"localtags", b"".join(tags))
 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
 indent_string = b' ' * indent
 if all:
-ui.write(
+ui.writenoi18n(
 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
 % indent_string
 )
 def showchunks(named):
 ui.write(b"\n%s%s\n" % (indent_string, named))
 for deltadata in gen.deltaiter():
 node, p1, p2, cs, deltabase, delta, flags = deltadata
 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
 def debugcapabilities(ui, path, **opts):
 """lists the capabilities of a remote peer"""
 opts = pycompat.byteskwargs(opts)
 peer = hg.peer(ui, opts, path)
 caps = peer.capabilities()
-ui.write(b'Main capabilities:\n')
+ui.writenoi18n(b'Main capabilities:\n')
 for c in sorted(caps):
 ui.write(b' %s\n' % c)
 b2caps = bundle2.bundle2caps(peer)
 if b2caps:
-ui.write(b'Bundle2 capabilities:\n')
+ui.writenoi18n(b'Bundle2 capabilities:\n')
 for key, values in sorted(b2caps.iteritems()):
 ui.write(b' %s\n' % key)
 for v in values:
 ui.write(b' %s\n' % v)
 @command(b'debugcheckstate', [], b'')
 def debugcheckstate(ui, repo):
 @command(
 b'debugcolor',
 [(b'', b'style', None, _(b'show all configured styles'))],
 b'hg debugcolor',
 )
 def debugcolor(ui, repo, **opts):
 """show available color, effects or style"""
-ui.write(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
+ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
 if opts.get(r'style'):
 return _debugdisplaystyle(ui)
 else:
 return _debugdisplaycolor(ui)
 def _debugdisplaycolor(ui):
 ui = ui.copy()
 optionalrepo=True,
 )
 def debugdate(ui, date, range=None, **opts):
 """parse and display a date"""
 if opts[r"extended"]:
 d = dateutil.parsedate(date, util.extendeddateformats)
 else:
 d = dateutil.parsedate(date)
-ui.write(b"internal: %d %d\n" % d)
+ui.writenoi18n(b"internal: %d %d\n" % d)
-ui.write(b"standard: %s\n" % dateutil.datestr(d))
+ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
 if range:
 m = dateutil.matchdate(range)
-ui.write(b"match: %s\n" % m(d[0]))
+ui.writenoi18n(b"match: %s\n" % m(d[0]))
 @command(
 b'debugdeltachain',
 cmdutil.debugrevlogopts + cmdutil.formatteropts,
 _(b'-c|-m|FILE'),
 optionalrepo=True,
 )
 if not util.safehasattr(remote, b'branches'):
 # enable in-client legacy support
 remote = localrepo.locallegacypeer(remote.local())
 common, _in, hds = treediscovery.findcommonincoming(
 repo, remote, force=True
 )
 common = set(common)
 if not opts.get(b'nonheads'):
-ui.write(
+ui.writenoi18n(
 b"unpruned common: %s\n"
 % b" ".join(sorted(short(n) for n in common))
 )
 clnode = repo.changelog.node
 common = repo.revs(b'heads(::%ln)', common)
 common = {clnode(r) for r in common}
 return common, hds
 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
 data[b'nb-remote'] = len(rheads)
 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
 data[b'nb-revs'] = len(repo.revs(b'all()'))
 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
 # display discovery summary
-ui.write(b"elapsed time: %(elapsed)f seconds\n" % data)
+ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
-ui.write(b"heads summary:\n")
+ui.writenoi18n(b"heads summary:\n")
-ui.write(b" total common heads: %(nb-common)9d\n" % data)
+ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
-ui.write(b" also local heads: %(nb-common-local)9d\n" % data)
+ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
-ui.write(b" also remote heads: %(nb-common-remote)9d\n" % data)
+ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
-ui.write(b" both: %(nb-common-both)9d\n" % data)
+ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
-ui.write(b" local heads: %(nb-local)9d\n" % data)
+ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
-ui.write(b" common: %(nb-common-local)9d\n" % data)
+ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
-ui.write(b" missing: %(nb-local-missing)9d\n" % data)
+ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
-ui.write(b" remote heads: %(nb-remote)9d\n" % data)
+ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
-ui.write(b" common: %(nb-common-remote)9d\n" % data)
+ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
-ui.write(b" unknown: %(nb-remote-unknown)9d\n" % data)
+ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
-ui.write(b"local changesets: %(nb-revs)9d\n" % data)
+ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
-ui.write(b" common: %(nb-revs-common)9d\n" % data)
+ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
-ui.write(b" missing: %(nb-revs-missing)9d\n" % data)
+ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
 if ui.verbose:
-ui.write(
+ui.writenoi18n(
 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
 )
 _chunksize = 4 << 10
 @command(
 )
 files.update(wctx.substate)
 else:
 files.update(ctx.files())
 files.update(ctx.substate)
 m = ctx.matchfileset(expr)
 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
-ui.write(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
+ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
 for f in sorted(files):
 if not m(f):
 continue
 ui.write(b"%s\n" % f)
 @command(b'debugformat', [] + cmdutil.formatteropts)
 def debugformat(ui, repo, **opts):
 )
 fm.plain(b'\n')
 fm.end()
 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
 def debugfsinfo(ui, path=b"."):
 """show information detected about current filesystem"""
-ui.write(b'path: %s\n' % path)
+ui.writenoi18n(b'path: %s\n' % path)
-ui.write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
+ui.writenoi18n(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
-ui.write(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
+ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
-ui.write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
+ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
-ui.write(b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no'))
+ui.writenoi18n(b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no'))
-ui.write(b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no'))
+ui.writenoi18n(b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no'))
 casesensitive = b'(unknown)'
 try:
 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
 except OSError:
 pass
-ui.write(b'case-sensitive: %s\n' % casesensitive)
+ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
 @command(
 b'debuggetbundle',
 [
 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
 (
 cmdutil.debugrevlogopts,
 _(b'-c|-m|FILE'),
 optionalrepo=True,
 )
 def debugindexdot(ui, repo, file_=None, **opts):
 """dump an index DAG as a graphviz dot file"""
 opts = pycompat.byteskwargs(opts)
 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
-ui.write(b"digraph G {\n")
+ui.writenoi18n(b"digraph G {\n")
 for i in r:
 node = r.node(i)
 pp = r.parents(node)
 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
 if pp[1] != nullid:
 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
 ui.write(b"}\n")
 if host == socket.gethostname():
 locker = b'user %s, process %s' % (user or b'None', pid)
 else:
 locker = b'user %s, process %s, host %s' % (
 user or b'None',
 pid,
 host,
 )
-ui.write(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
+ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
 return 1
 except OSError as e:
 if e.errno != errno.ENOENT:
 raise
-ui.write(b"%-6s free\n" % (name + b":"))
+ui.writenoi18n(b"%-6s free\n" % (name + b":"))
 return 0
 held += report(repo.svfs, b"lock", repo.lock)
 held += report(repo.vfs, b"wlock", repo.wlock)
 return held
 def _hashornull(h):
 if h == nullhex:
 return b'null'
 else:
 return h
 def printrecords(version):
-ui.write(b'* version %d records\n' % version)
+ui.writenoi18n(b'* version %d records\n' % version)
 if version == 1:
 records = v1records
 else:
 records = v2records
 for rtype, record in records:
 # pretty print some record types
 if rtype == b'L':
-ui.write(b'local: %s\n' % record)
+ui.writenoi18n(b'local: %s\n' % record)
 elif rtype == b'O':
-ui.write(b'other: %s\n' % record)
+ui.writenoi18n(b'other: %s\n' % record)
 elif rtype == b'm':
 driver, mdstate = record.split(b'\0', 1)
-ui.write(b'merge driver: %s (state "%s")\n' % (driver, mdstate))
+ui.writenoi18n(b'merge driver: %s (state "%s")\n' % (driver, mdstate))
 elif rtype in b'FDC':
 r = record.split(b'\0')
 f, state, hash, lfile, afile, anode, ofile = r[0:7]
 if version == 1:
 onode = b'not stored in v1 format'
 flags = r[7]
 else:
 onode, flags = r[7:9]
-ui.write(
+ui.writenoi18n(
 b'file: %s (record type "%s", state "%s", hash %s)\n'
 % (f, rtype, state, _hashornull(hash))
 )
-ui.write(b' local path: %s (flags "%s")\n' % (lfile, flags))
+ui.writenoi18n(b' local path: %s (flags "%s")\n' % (lfile, flags))
-ui.write(
+ui.writenoi18n(
 b' ancestor path: %s (node %s)\n'
 % (afile, _hashornull(anode))
 )
-ui.write(
+ui.writenoi18n(
 b' other path: %s (node %s)\n'
 % (ofile, _hashornull(onode))
 )
 elif rtype == b'f':
 filename, rawextras = record.split(b'\0', 1)
 extras = rawextras.split(b'\0')
 i = 0
 extrastrings = []
 while i < len(extras):
 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
 i += 2
-ui.write(
+ui.writenoi18n(
 b'file extras: %s (%s)\n'
 % (filename, b', '.join(extrastrings))
 )
 elif rtype == b'l':
 labels = record.split(b'\0', 2)
 labels = [l for l in labels if len(l) > 0]
-ui.write(b'labels:\n')
+ui.writenoi18n(b'labels:\n')
 ui.write((b' local: %s\n' % labels[0]))
 ui.write((b' other: %s\n' % labels[1]))
 if len(labels) > 2:
 ui.write((b' base: %s\n' % labels[2]))
 else:
-ui.write(
+ui.writenoi18n(
 b'unrecognized entry: %s\t%s\n'
 % (rtype, record.replace(b'\0', b'\t'))
 )
 # Avoid mergestate.read() since it may raise an exception for unsupported
 # merge state records. We shouldn't be doing this, but this is OK since this
 # command is pretty low-level.
 ms = mergemod.mergestate(repo)
 return (1, r[1])
 else:
 return (0, idx)
 v1records.sort(key=key)
 v2records.sort(key=key)
 if not v1records and not v2records:
-ui.write(b'no merge state found\n')
+ui.writenoi18n(b'no merge state found\n')
 elif not v2records:
-ui.note(b'no version 2 merge state\n')
+ui.notenoi18n(b'no version 2 merge state\n')
 printrecords(1)
 elif ms._v1v2match(v1records, v2records):
-ui.note(b'v1 and v2 states match: using v2\n')
+ui.notenoi18n(b'v1 and v2 states match: using v2\n')
 printrecords(2)
 else:
-ui.note(b'v1 and v2 states mismatch: using v1\n')
+ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
 printrecords(1)
 if ui.verbose:
 printrecords(2)
 @command(b'debugnamecomplete', [], _(b'NAME...'))
 def debugnamecomplete(ui, repo, *args):
 '''complete "names" - tags, open branch names, bookmark names'''
 ``merge-patterns``, this command can't show any helpful
 information, even with --debug. In such case, information above is
 useful to know why a merge tool is chosen.
 """
 opts = pycompat.byteskwargs(opts)
 overrides = {}
 if opts[b'tool']:
 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
-ui.note(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
+ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
 with ui.configoverride(overrides, b'debugmergepatterns'):
 hgmerge = encoding.environ.get(b"HGMERGE")
 if hgmerge is not None:
-ui.note(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
+ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
 uimerge = ui.config(b"ui", b"merge")
 if uimerge:
-ui.note(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
+ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
 m = scmutil.match(ctx, pats, opts)
 changedelete = opts[b'changedelete']
 for path in ctx.walk(m):
 fctx = ctx[path]
 try:
 if not ui.debugflag:
 return basepcfmtstr % (len(str(max)), b' ' * padding)
 def pcfmt(value, total):
 if total:
 return (value, 100 * float(value) / total)
 else:
 return value, 100.0
-ui.write(b'format : %d\n' % format)
+ui.writenoi18n(b'format : %d\n' % format)
-ui.write(b'flags : %s\n' % b', '.join(flags))
+ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
 ui.write(b'\n')
 fmt = pcfmtstr(totalsize)
 fmt2 = dfmtstr(totalsize)
-ui.write(b'revisions : ' + fmt2 % numrevs)
+ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
-ui.write(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
+ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
-ui.write(b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
+ui.writenoi18n(b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
-ui.write(b'revisions : ' + fmt2 % numrevs)
+ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
-ui.write(b' empty : ' + fmt % pcfmt(numempty, numrevs))
+ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
-ui.write(
+ui.writenoi18n(
 b' text : '
 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
 )
-ui.write(
+ui.writenoi18n(
 b' delta : '
 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
 )
-ui.write(b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs))
+ui.writenoi18n(b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs))
 for depth in sorted(numsnapdepth):
 ui.write(
 (b' lvl-%-3d : ' % depth)
 + fmt % pcfmt(numsnapdepth[depth], numrevs)
 )
-ui.write(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
+ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
-ui.write(b'revision size : ' + fmt2 % totalsize)
+ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
-ui.write(
+ui.writenoi18n(
 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
 )
 for depth in sorted(numsnapdepth):
 ui.write(
 (b' lvl-%-3d : ' % depth)
 + fmt % pcfmt(snaptotal[depth], totalsize)
 )
-ui.write(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
+ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
 def fmtchunktype(chunktype):
 if chunktype == b'empty':
 return b' %s : ' % chunktype
 elif chunktype in pycompat.bytestr(string.ascii_letters):
 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
 else:
 return b' 0x%s : ' % hex(chunktype)
 ui.write(b'\n')
-ui.write(b'chunks : ' + fmt2 % numrevs)
+ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
 for chunktype in sorted(chunktypecounts):
 ui.write(fmtchunktype(chunktype))
 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
-ui.write(b'chunks size : ' + fmt2 % totalsize)
+ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
 for chunktype in sorted(chunktypecounts):
 ui.write(fmtchunktype(chunktype))
 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
 ui.write(b'\n')
 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
-ui.write(b'avg chain length : ' + fmt % avgchainlen)
+ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
-ui.write(b'max chain length : ' + fmt % maxchainlen)
+ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
-ui.write(b'max chain reach : ' + fmt % maxchainspan)
+ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
-ui.write(b'compression ratio : ' + fmt % compratio)
+ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
 if format > 0:
 ui.write(b'\n')
-ui.write(
+ui.writenoi18n(
 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
 % tuple(datasize)
 )
-ui.write(
+ui.writenoi18n(
 b'full revision size (min/max/avg) : %d / %d / %d\n'
 % tuple(fullsize)
 )
-ui.write(
+ui.writenoi18n(
 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
 % tuple(semisize)
 )
 for depth in sorted(snapsizedepth):
 if depth == 0:
 continue
-ui.write(
+ui.writenoi18n(
 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
 % ((depth,) + tuple(snapsizedepth[depth]))
 )
-ui.write(
+ui.writenoi18n(
 b'delta size (min/max/avg) : %d / %d / %d\n'
 % tuple(deltasize)
 )
 if numdeltas > 0:
 ui.write(b'\n')
 fmt = pcfmtstr(numdeltas)
 fmt2 = pcfmtstr(numdeltas, 4)
-ui.write(b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
+ui.writenoi18n(b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
 if numprev > 0:
-ui.write(
+ui.writenoi18n(
 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
 )
-ui.write(
+ui.writenoi18n(
 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
 )
-ui.write(
+ui.writenoi18n(
 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
 )
 if gdelta:
-ui.write(b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
+ui.writenoi18n(b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
-ui.write(b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
+ui.writenoi18n(b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
-ui.write(
+ui.writenoi18n(
 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
 )
 @command(
 b'debugrevlogindex',
 cmdutil.debugrevlogopts
 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
 if format == 0:
 if ui.verbose:
 ui.write(
 (b" rev offset length linkrev" b" %s %s p2\n")
 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
 )
 else:
-ui.write(
+ui.writenoi18n(
 b" rev linkrev %s %s p2\n"
 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
 )
 elif format == 1:
 if ui.verbose:
 ui.write(
 (
 b" rev flag offset length size link p1"
 b" p2 %s\n"
 )
 % b"nodeid".rjust(idlen)
 )
 else:
-ui.write(
+ui.writenoi18n(
 b" rev flag size link p1 p2 %s\n"
 % b"nodeid".rjust(idlen)
 )
 for i in r:
 node = r.node(i)
 if format == 0:
 try:
 ui.write(b"* %s:\n" % n)
 ui.write(revsetlang.prettyformat(tree), b"\n")
 printedtree = tree
 if opts[b'verify_optimized']:
 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
-ui.write(b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n")
+ui.writenoi18n(b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n")
-ui.write(b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n")
+ui.writenoi18n(b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n")
 arevs = list(arevs)
 brevs = list(brevs)
 if arevs == brevs:
 return 0
-ui.write(b'--- analyzed\n', label=b'diff.file_a')
+ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
-ui.write(b'+++ optimized\n', label=b'diff.file_b')
+ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
 sm = difflib.SequenceMatcher(None, arevs, brevs)
 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
 if tag in (r'delete', r'replace'):
 for c in arevs[alo:ahi]:
 ui.write(b'-%d\n' % c, label=b'diff.deleted')
 if tag in (r'insert', r'replace'):
 for c in brevs[blo:bhi]:
 ui.write(b'+%d\n' % c, label=b'diff.inserted')
 if tag == r'equal':
 for c in arevs[alo:ahi]:
 ui.write(b' %d\n' % c)
 return 1
 func = revset.makematcher(tree)
 revs = func(repo)
 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
-ui.write(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
+ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
 if not opts[b'show_revs']:
 return
 for c in revs:
 ui.write(b"%d\n" % c)
 @command(
 b'debugserve',
 @command(
 b'debugsub',
 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
 _(b'[-r REV] [REV]'),
 )
 def debugsub(ui, repo, rev=None):
 ctx = scmutil.revsingle(repo, rev, None)
 for k, v in sorted(ctx.substate.items()):
-ui.write(b'path %s\n' % k)
+ui.writenoi18n(b'path %s\n' % k)
-ui.write(b' source %s\n' % v[0])
+ui.writenoi18n(b' source %s\n' % v[0])
-ui.write(b' revision %s\n' % v[1])
+ui.writenoi18n(b' revision %s\n' % v[1])
 @command(
 b'debugsuccessorssets',
 [(b'', b'closest', False, _(b'return closest successors sets only'))],
 _(b'[REV]'),
 )
 def debugsuccessorssets(ui, repo, *revs, **opts):
 raise error.Abort(_(b'malformed keyword definition: %s') % d)
 if ui.verbose:
 aliases = ui.configitems(b'templatealias')
 tree = templater.parse(tmpl)
 ui.note(templater.prettyformat(tree), b'\n')
 newtree = templater.expandaliases(tree, aliases)
 if newtree != tree:
-ui.note(b"* expanded:\n", templater.prettyformat(newtree), b'\n')
+ui.notenoi18n(b"* expanded:\n", templater.prettyformat(newtree), b'\n')
 if revs is None:
 tres = formatter.templateresources(ui, repo)
 t = formatter.maketemplater(ui, tmpl, resources=tres)
 if ui.verbose:
 kwds, funcs = t.symbolsuseddefault()
-ui.write(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
+ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
-ui.write(b"* functions: %s\n" % b', '.join(sorted(funcs)))
+ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
 ui.write(t.renderdefault(props))
 else:
 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
 if ui.verbose:
 kwds, funcs = displayer.t.symbolsuseddefault()
-ui.write(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
+ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
-ui.write(b"* functions: %s\n" % b', '.join(sorted(funcs)))
+ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
 for r in revs:
 displayer.show(repo[r], **pycompat.strkwargs(props))
 displayer.close()
 @command(
 b'debuguigetpass',
 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
 _(b'[-p TEXT]'),
norepo=True, | norepo=True, | ||||
) | ) | ||||
def debuguigetpass(ui, prompt=b''): | def debuguigetpass(ui, prompt=b''): | ||||
"""show prompt to type password""" | """show prompt to type password""" | ||||
r = ui.getpass(prompt) | r = ui.getpass(prompt) | ||||
ui.write(b'response: %s\n' % r) | ui.writenoi18n(b'response: %s\n' % r) | ||||
@command( | @command( | ||||
b'debuguiprompt', | b'debuguiprompt', | ||||
[(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),], | [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),], | ||||
_(b'[-p TEXT]'), | _(b'[-p TEXT]'), | ||||
norepo=True, | norepo=True, | ||||
) | ) | ||||
def debuguiprompt(ui, prompt=b''): | def debuguiprompt(ui, prompt=b''): | ||||
"""show plain prompt""" | """show plain prompt""" | ||||
r = ui.prompt(prompt) | r = ui.prompt(prompt) | ||||
ui.write(b'response: %s\n' % r) | ui.writenoi18n(b'response: %s\n' % r) | ||||
@command(b'debugupdatecaches', []) | @command(b'debugupdatecaches', []) | ||||
def debugupdatecaches(ui, repo, *pats, **opts): | def debugupdatecaches(ui, repo, *pats, **opts): | ||||
"""warm all known caches in the repository""" | """warm all known caches in the repository""" | ||||
with repo.wlock(), repo.lock(): | with repo.wlock(), repo.lock(): | ||||
repo.updatecaches(full=True) | repo.updatecaches(full=True) | ||||
@command( | @command( | ||||
b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True | b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True | ||||
) | ) | ||||
def debugwalk(ui, repo, *pats, **opts): | def debugwalk(ui, repo, *pats, **opts): | ||||
"""show how files match on given patterns""" | """show how files match on given patterns""" | ||||
opts = pycompat.byteskwargs(opts) | opts = pycompat.byteskwargs(opts) | ||||
m = scmutil.match(repo[None], pats, opts) | m = scmutil.match(repo[None], pats, opts) | ||||
if ui.verbose: | if ui.verbose: | ||||
ui.write(b'* matcher:\n', stringutil.prettyrepr(m), b'\n') | ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n') | ||||
items = list(repo[None].walk(m)) | items = list(repo[None].walk(m)) | ||||
if not items: | if not items: | ||||
return | return | ||||
f = lambda fn: fn | f = lambda fn: fn | ||||
if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/': | if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/': | ||||
f = lambda fn: util.normpath(fn) | f = lambda fn: util.normpath(fn) | ||||
fmt = b'f %%-%ds %%-%ds %%s' % ( | fmt = b'f %%-%ds %%-%ds %%s' % ( | ||||
max([len(abs) for abs in items]), | max([len(abs) for abs in items]), |
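
The debugcommands.py hunks above are purely mechanical: every untranslated debug message switches from ui.write/ui.warn/ui.note/ui.status to the corresponding *noi18n method, with the message bytes unchanged. The definitions of those methods are not part of this diff, so the following is a minimal sketch, assuming they are plain aliases whose only job is to mark the string as intentionally untranslated; the fakeui class is illustrative, not Mercurial's real ui.

```python
from __future__ import absolute_import, print_function

import sys


class fakeui(object):
    """Hypothetical stand-in for mercurial.ui.ui, for illustration only."""

    def write(self, *args):
        for a in args:
            sys.stdout.write(a.decode('ascii'))

    def warn(self, *args):
        for a in args:
            sys.stderr.write(a.decode('ascii'))

    # assumed: the *noi18n names are simple aliases; behaviour is identical,
    # only the name differs, so tooling can tell "intentionally untranslated"
    # apart from "forgot the _() wrapper"
    writenoi18n = write
    warnnoi18n = warn


u = fakeui()
u.writenoi18n(b'path %s\n' % b'sub/repo')  # same output as u.write(...)
```
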
try: | try: | ||||
while handlers: | while handlers: | ||||
func, args, kwargs = handlers.pop() | func, args, kwargs = handlers.pop() | ||||
try: | try: | ||||
func(*args, **kwargs) | func(*args, **kwargs) | ||||
except: # re-raises below | except: # re-raises below | ||||
if exc is None: | if exc is None: | ||||
exc = sys.exc_info()[1] | exc = sys.exc_info()[1] | ||||
self.ui.warn(b'error in exit handlers:\n') | self.ui.warnnoi18n(b'error in exit handlers:\n') | ||||
self.ui.traceback(force=True) | self.ui.traceback(force=True) | ||||
finally: | finally: | ||||
if exc is not None: | if exc is not None: | ||||
raise exc | raise exc | ||||
def run(): | def run(): | ||||
b"run the command in sys.argv" | b"run the command in sys.argv" |
if b'SSLKEYLOGFILE' in encoding.environ: | if b'SSLKEYLOGFILE' in encoding.environ: | ||||
try: | try: | ||||
import sslkeylog | import sslkeylog | ||||
sslkeylog.set_keylog( | sslkeylog.set_keylog( | ||||
pycompat.fsdecode(encoding.environ[b'SSLKEYLOGFILE']) | pycompat.fsdecode(encoding.environ[b'SSLKEYLOGFILE']) | ||||
) | ) | ||||
ui.warn( | ui.warnnoi18n( | ||||
b'sslkeylog enabled by SSLKEYLOGFILE environment variable\n' | b'sslkeylog enabled by SSLKEYLOGFILE environment variable\n' | ||||
) | ) | ||||
except ImportError: | except ImportError: | ||||
ui.warn( | ui.warnnoi18n( | ||||
b'sslkeylog module missing, ' | b'sslkeylog module missing, ' | ||||
b'but SSLKEYLOGFILE set in environment\n' | b'but SSLKEYLOGFILE set in environment\n' | ||||
) | ) | ||||
for f in (keyfile, certfile): | for f in (keyfile, certfile): | ||||
if f and not os.path.exists(f): | if f and not os.path.exists(f): | ||||
raise error.Abort( | raise error.Abort( | ||||
_(b'certificate file (%s) does not exist; cannot connect to %s') | _(b'certificate file (%s) does not exist; cannot connect to %s') |
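
The sslutil.py hunk only changes how the SSLKEYLOGFILE warnings are emitted, but it also shows the mechanism being wrapped. As a side note, here is a standalone sketch of what that branch does, calling the third-party sslkeylog package directly; the package name and set_keylog() call match what the hunk uses, while the fallback path and the assumption that the package is installed are illustrative.

```python
from __future__ import absolute_import, print_function

import os

import sslkeylog  # third-party package; assumed to be installed

sslkeylog.set_keylog(os.environ.get('SSLKEYLOGFILE', 'tls-keys.log'))

# Every ssl.SSLSocket created by this process from now on appends its session
# secrets to that file in the NSS key-log format, which packet-capture tools
# can use to decrypt recorded TLS traffic.
```
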
from __future__ import absolute_import | from __future__ import absolute_import | ||||
import os | import os | ||||
from mercurial import ( | from mercurial import ( | ||||
dispatch, | dispatch, | ||||
extensions, | extensions, | ||||
ui as uimod, | ui as uimod, | ||||
) | ) | ||||
def testdispatch(cmd): | def testdispatch(cmd): | ||||
"""Simple wrapper around dispatch.dispatch() | """Simple wrapper around dispatch.dispatch() | ||||
Prints command and result value, but does not handle quoting. | Prints command and result value, but does not handle quoting. | ||||
""" | """ | ||||
ui = uimod.ui.load() | ui = uimod.ui.load() | ||||
extensions.populateui(ui) | extensions.populateui(ui) | ||||
ui.status(b"running: %s\n" % cmd) | ui.statusnoi18n(b"running: %s\n" % cmd) | ||||
req = dispatch.request(cmd.split(), ui) | req = dispatch.request(cmd.split(), ui) | ||||
result = dispatch.dispatch(req) | result = dispatch.dispatch(req) | ||||
ui.status(b"result: %r\n" % result) | ui.statusnoi18n(b"result: %r\n" % result) | ||||
# create file 'foo', add and commit | # create file 'foo', add and commit | ||||
f = open(b'foo', 'wb') | f = open(b'foo', 'wb') | ||||
f.write(b'foo\n') | f.write(b'foo\n') | ||||
f.close() | f.close() | ||||
testdispatch(b"--debug add foo") | testdispatch(b"--debug add foo") | ||||
testdispatch(b"--debug commit -m commit1 -d 2000-01-01 foo") | testdispatch(b"--debug commit -m commit1 -d 2000-01-01 foo") |
from __future__ import absolute_import, print_function | from __future__ import absolute_import, print_function | ||||
import os | import os | ||||
from mercurial import ( | from mercurial import ( | ||||
dispatch, | dispatch, | ||||
ui as uimod, | ui as uimod, | ||||
) | ) | ||||
from mercurial.utils import stringutil | from mercurial.utils import stringutil | ||||
# ensure errors aren't buffered | # ensure errors aren't buffered | ||||
testui = uimod.ui() | testui = uimod.ui() | ||||
testui.pushbuffer() | testui.pushbuffer() | ||||
testui.write(b'buffered\n') | testui.writenoi18n(b'buffered\n') | ||||
testui.warn(b'warning\n') | testui.warnnoi18n(b'warning\n') | ||||
testui.write_err(b'error\n') | testui.write_err(b'error\n') | ||||
print(stringutil.pprint(testui.popbuffer(), bprefix=True).decode('ascii')) | print(stringutil.pprint(testui.popbuffer(), bprefix=True).decode('ascii')) | ||||
# test dispatch.dispatch with the same ui object | # test dispatch.dispatch with the same ui object | ||||
hgrc = open(os.environ["HGRCPATH"], 'wb') | hgrc = open(os.environ["HGRCPATH"], 'wb') | ||||
hgrc.write(b'[extensions]\n') | hgrc.write(b'[extensions]\n') | ||||
hgrc.write(b'color=\n') | hgrc.write(b'color=\n') | ||||
hgrc.close() | hgrc.close() |
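
Taken together, the test changes above suggest the intended division of labour between the two kinds of output. The snippet below is an illustration written in the same style as the document's own test scripts, not code from the diff: the message text is made up, and it assumes a ui obtained via ui.load() behaves like the ones used in these tests.

```python
from __future__ import absolute_import, print_function

from mercurial import ui as uimod
from mercurial.i18n import _

ui = uimod.ui.load()

# user-facing message: wrapped in _() so it can be translated
ui.status(_(b'made-up translatable message\n'))

# debug/test output: must stay byte-for-byte stable, so it deliberately
# skips _() and says so via the method name
ui.statusnoi18n(b'made-up untranslated message\n')
```
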