Inheritance from object is implied in Python 3, so this change should be equivalent.

This change was generated via an automated search and replace, so there may have been some accidental changes.
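For reference, a minimal sketch of the claim (standard Python behaviour, not part of the patch): in Python 3 a bare class statement already derives from object, so dropping the explicit base does not change the MRO. The re.sub call at the end is only a hypothetical illustration of the kind of search-and-replace that could produce such a change; the actual command used is not recorded here.

```python
import re

# In Python 3 both spellings produce the same new-style class.
class Explicit(object):
    pass

class Implicit:
    pass

assert Explicit.__mro__[-1] is object
assert Implicit.__mro__[-1] is object
# (Under Python 2, "class Implicit:" would have been an old-style class,
# which is why the explicit base existed in the first place.)

# Hypothetical sketch of an automated rewrite of this shape:
source = "class noop(object):\n    pass\n"
print(re.sub(r"class (\w+)\(object\):", r"class \1:", source))
```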
Reviewers: durin42, martinvonz, Alphare, hg-reviewers
No Linters Available
No Unit Test Coverage
Commit | Parents | Author | Summary | Date
---|---|---|---|---
bf2d3f5f006e | 2c020cb5c153 | Gregory Szorc | | Feb 21 2022, 3:08 PM
Status | Author | Revision
---|---|---
Closed | indygreg | D12339 ui: use input() directly
     for c in cs:
         failandwarn = c[-1]
         preparefailandwarn(failandwarn)
         filters = c[-2]
         preparefilters(filters)


-class norepeatlogger(object):
+class norepeatlogger:
     def __init__(self):
         self._lastseen = None

     def log(self, fname, lineno, line, msg, blame):
         """print error related a to given line of a given file.

         The faulty line will also be printed but only once in the case
         of multiple errors.
     server = subprocess.Popen(
         tonative(cmdline), stdin=subprocess.PIPE, stdout=subprocess.PIPE
     )
     return server


-class unixconnection(object):
+class unixconnection:
     def __init__(self, sockpath):
         self.sock = sock = socket.socket(socket.AF_UNIX)
         sock.connect(sockpath)
         self.stdin = sock.makefile('wb')
         self.stdout = sock.makefile('rb')

     def wait(self):
         self.stdin.close()
         self.stdout.close()
         self.sock.close()


-class unixserver(object):
+class unixserver:
     def __init__(self, sockpath, logpath=None, repopath=None):
         self.sockpath = sockpath
         cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
         if repopath:
             cmdline += [b'-R', repopath]
         if logpath:
             stdout = open(logpath, 'a')
             stderr = subprocess.STDOUT
 def getlen(ui):
     if ui.configbool(b"perf", b"stub", False):
         return lambda x: 1
     return len


-class noop(object):
+class noop:
     """dummy context manager"""

     def __enter__(self):
         pass

     def __exit__(self, *args):
         pass

...

     if uiformatter:
         fm = uiformatter(b'perf', opts)
     else:
         # for "historical portability":
         # define formatter locally, because ui.formatter has been
         # available since 2.2 (or ae5f92e154d3)
         from mercurial import node

-        class defaultformatter(object):
+        class defaultformatter:
             """Minimized composition of baseformatter and plainformatter"""

             def __init__(self, ui, topic, opts):
                 self._ui = ui
                 if ui.debugflag:
                     self.hexfunc = node.hex
                 else:
                     self.hexfunc = node.short

...

                 b"missing attribute %s of %s might break assumption"
                 b" of performance measurement"
             )
             % (name, obj)
         )

     origvalue = getattr(obj, _sysstr(name))

-    class attrutil(object):
+    class attrutil:
         def set(self, newvalue):
             setattr(obj, _sysstr(name), newvalue)

         def restore(self):
             setattr(obj, _sysstr(name), origvalue)

     return attrutil()

...

         fm,
         totaltime,
         title="total time (%d revs)" % resultcount,
         displayall=displayall,
     )
     fm.end()


-class _faketr(object):
+class _faketr:
     def add(s, x, y, z=None):
         return None


 def _timeonewrite(
     ui,
     orig,
     source,
 def writeerr(data):
     # write "data" in BYTES into stderr
     sys.stderr.write(data)


 ####################

-class embeddedmatcher(object):  # pytype: disable=ignored-metaclass
+class embeddedmatcher:  # pytype: disable=ignored-metaclass
     """Base class to detect embedded code fragments in *.t test script"""

     __metaclass__ = abc.ABCMeta

     def __init__(self, desc):
         self.desc = desc

     @abc.abstractmethod

...

     :filename: a name of embedded code, if it is explicitly specified
         (e.g. "foobar" of "cat >> foobar <<EOF").
         Otherwise, this is None
     :starts: line number (1-origin), at which embedded code starts (inclusive)
     :ends: line number (1-origin), at which embedded code ends (exclusive)
     :code: extracted embedded code, which is single-stringified

-    >>> class ambigmatcher(object):
+    >>> class ambigmatcher:
     ...     # mock matcher class to examine implementation of
     ...     # "ambiguous matching" corner case
     ...     def __init__(self, desc, matchfunc):
     ...         self.desc = desc
     ...         self.matchfunc = matchfunc
     ...     def startsat(self, line):
     ...         return self.matchfunc(line)
     >>> ambig1 = ambigmatcher('ambiguous #1',
         self.translator_class = Translator

     def translate(self):
         visitor = self.translator_class(self.document)
         self.document.walkabout(visitor)
         self.output = visitor.astext()


-class Table(object):
+class Table:
     def __init__(self):
         self._rows = []
         self._options = ['center']
         self._tab_char = '\t'
         self._coldefs = []

     def new_row(self):
         self._rows.append([])

...

         text = '\\&' + text
         text = text.replace('\n.', '\n\\&.')
         self.body.append(text)

     def depart_Text(self, node):
         pass

     def list_start(self, node):
-        class enum_char(object):
+        class enum_char:
             enum_style = {
                 'bullet': '\\(bu',
                 'emdash': '\\(em',
             }

             def __init__(self, style):
                 self._style = style
                 if 'start' in node:
         nameroot = hgextname.split('.', 1)[0]
         contextroot = globals.get('__name__', '').split('.', 1)[0]
         if nameroot != contextroot:
             raise
         # retry to import with "hgext_" prefix
         return importfunc(hgextname, globals, *args, **kwargs)


-class _demandmod(object):
+class _demandmod:
     """module demand-loader and proxy

     Specify 1 as 'level' argument at construction, to import module
     relatively.
     """

     def __init__(self, name, globals, locals, level):
         if '.' in name:
"""Make the module load lazily.""" | """Make the module load lazily.""" | ||||
with tracing.log('demandimport %s', module): | with tracing.log('demandimport %s', module): | ||||
if _deactivated or module.__name__ in ignores: | if _deactivated or module.__name__ in ignores: | ||||
self.loader.exec_module(module) | self.loader.exec_module(module) | ||||
else: | else: | ||||
super().exec_module(module) | super().exec_module(module) | ||||
class LazyFinder(object): | class LazyFinder: | ||||
"""A wrapper around a ``MetaPathFinder`` that makes loaders lazy. | """A wrapper around a ``MetaPathFinder`` that makes loaders lazy. | ||||
``sys.meta_path`` finders have their ``find_spec()`` called to locate a | ``sys.meta_path`` finders have their ``find_spec()`` called to locate a | ||||
module. This returns a ``ModuleSpec`` if found or ``None``. The | module. This returns a ``ModuleSpec`` if found or ``None``. The | ||||
``ModuleSpec`` has a ``loader`` attribute, which is called to actually | ``ModuleSpec`` has a ``loader`` attribute, which is called to actually | ||||
load a module. | load a module. | ||||
Our class wraps an existing finder and overloads its ``find_spec()`` to | Our class wraps an existing finder and overloads its ``find_spec()`` to |
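The LazyFinder docstring above summarizes the importlib meta path protocol. As a rough, hypothetical sketch of that protocol (not code from this change), a finder wrapper that delegates ``find_spec()`` might look like:

```python
import importlib.abc
import sys

class tracingfinder(importlib.abc.MetaPathFinder):
    """Hypothetical wrapper: delegate find_spec() to an existing finder."""

    def __init__(self, finder):
        self._finder = finder

    def find_spec(self, fullname, path, target=None):
        spec = self._finder.find_spec(fullname, path, target)
        if spec is not None:
            # spec.loader is the object that will actually load the module
            print('resolved %s via %r' % (fullname, spec.loader))
        return spec

# sys.meta_path is consulted in order, so wrapping every entry is enough
# to observe (or, as LazyFinder does, to alter) how modules get loaded.
sys.meta_path = [tracingfinder(f) for f in sys.meta_path]
```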
     b'absorb.description': b'yellow',
     b'absorb.node': b'blue bold',
     b'absorb.path': b'bold',
 }

 defaultdict = collections.defaultdict


-class nullui(object):
+class nullui:
     """blank ui object doing nothing"""

     debugflag = False
     verbose = False
     quiet = True

     def __getitem__(name):
         def nullfunc(*args, **kwds):
             return

         return nullfunc


-class emptyfilecontext(object):
+class emptyfilecontext:
     """minimal filecontext representing an empty file"""

     def __init__(self, repo):
         self._repo = repo

     def data(self):
         return b''

...

         filectxfn=store,
         user=user,
         date=date,
         branch=None,
         extra=extra,
     )


-class filefixupstate(object):
+class filefixupstate:
     """state needed to apply fixups to a single file

     internally, it keeps file contents of several revisions and a linelog.

     the linelog uses odd revision numbers for original contents (fctxs passed
     to __init__), and even revision numbers for fixups, like:

         linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)

...

             bidxs[i - b1],
             b'+',
             trim(blines[i]),
             b'inserted',
             b'diff.inserted',
         )


-class fixupstate(object):
+class fixupstate:
     """state needed to run absorb

     internally, it keeps paths and filefixupstates.

     a typical use is like filefixupstates:

         1. call diffwith, to calculate fixups
         2. (optionally), present fixups to the user, or edit fixups
     b'ignore',
     default=lambda: [b'chgserver', b'cmdserver', b'extension'],
 )
 configitem(b'blackbox', b'date-format', default=b'')

 _lastlogger = loggingutil.proxylogger()


-class blackboxlogger(object):
+class blackboxlogger:
     def __init__(self, ui, repo):
         self._repo = repo
         self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
         self._ignoredevents = set(ui.configlist(b'blackbox', b'ignore'))
         self._maxfiles = ui.configint(b'blackbox', b'maxfiles')
         self._maxsize = ui.configbytes(b'blackbox', b'maxsize')
         self._inlog = False

...

 )
 configitem(
     b'bugzilla',
     b'version',
     default=None,
 )


-class bzaccess(object):
+class bzaccess:
     '''Base class for access to Bugzilla.'''

     def __init__(self, ui):
         self.ui = ui
         usermap = self.ui.config(b'bugzilla', b'usermap')
         if usermap:
             self.ui.readconfig(usermap, sections=[b'usermap'])

...

         if len(ids) != 1:
             raise error.Abort(_(b'unknown database schema'))
         return ids[0][0]


 # Bugzilla via XMLRPC interface.

-class cookietransportrequest(object):
+class cookietransportrequest:
     """A Transport request method that retains cookies over its lifetime.

     The regular xmlrpclib transports ignore cookies. Which causes
     a bit of a problem when you need a cookie-based login, as with
     the Bugzilla XMLRPC interface prior to 4.4.3.

     So this is a helper for defining a Transport which looks for
     cookies being set in responses and saves them to add to all future

...

         """Force sending of Bugzilla notification emails.

         Only required if the access method does not trigger notification
         emails automatically.
         """
         pass


-class bugzilla(object):
+class bugzilla:
     # supported versions of bugzilla. different versions have
     # different schemas.
     _versions = {
         b'2.16': bzmysql,
         b'2.18': bzmysql_2_18,
         b'3.0': bzmysql_3_0,
         b'xmlrpc': bzxmlrpc,
         b'xmlrpc+email': bzxmlrpcemail,
 def _encodeornone(d):
     if d is None:
         return
     return d.encode('latin1')


-class _shlexpy3proxy(object):
+class _shlexpy3proxy:
     def __init__(self, l):
         self._l = l

     def __iter__(self):
         return (_encodeornone(v) for v in self._l)

     def get_token(self):
         return _encodeornone(self._l.get_token())

...

 class NoRepo(Exception):
     pass


 SKIPREV = b'SKIP'


-class commit(object):
+class commit:
     def __init__(
         self,
         author,
         date,
         desc,
         parents,
         branch=None,
         rev=None,

...

         self.rev = rev
         self.extra = extra or {}
         self.sortkey = sortkey
         self.saverev = saverev
         self.phase = phase
         self.ctx = ctx  # for hg to hg conversions


-class converter_source(object):
+class converter_source:
     """Conversion source interface"""

     def __init__(self, ui, repotype, path=None, revs=None):
         """Initialize conversion source (or raise NoRepo("message")
         exception if path is not a valid repository)"""
         self.ui = ui
         self.path = path
         self.revs = revs

...

     def checkrevformat(self, revstr, mapname=b'splicemap'):
         """revstr is a string that describes a revision in the given
         source control system. Return true if revstr has correct
         format.
         """
         return True


-class converter_sink(object):
+class converter_sink:
     """Conversion sink (target) interface"""

     def __init__(self, ui, repotype, path):
         """Initialize conversion sink (or raise NoRepo("message")
         exception if path is not a valid repository)

         created is a list of paths to remove if a fatal error occurs
         later"""

...

     def hascommitforsplicemap(self, rev):
         """This method is for the special needs for splicemap handling and not
         for general use. Returns True if the sink contains rev, aborts on some
         special cases."""
         raise NotImplementedError


-class commandline(object):
+class commandline:
     def __init__(self, ui, command):
         self.ui = ui
         self.command = command

     def prerun(self):
         pass

     def postrun(self):
             return sink(ui, name, path)
         except NoRepo as inst:
             ui.note(_(b"convert: %s\n") % inst)
         except MissingTool as inst:
             raise error.Abort(b'%s\n' % inst)
     raise error.Abort(_(b'%s: unknown repository type') % path)


-class progresssource(object):
+class progresssource:
     def __init__(self, ui, source, filecount):
         self.ui = ui
         self.source = source
         self.progress = ui.makeprogress(
             _(b'getting files'), unit=_(b'files'), total=filecount
         )

     def getfile(self, file, rev):
         self.progress.increment(item=file)
         return self.source.getfile(file, rev)

     def targetfilebelongstosource(self, targetfilename):
         return self.source.targetfilebelongstosource(targetfilename)

     def lookuprev(self, rev):
         return self.source.lookuprev(rev)

     def close(self):
         self.progress.complete()


-class converter(object):
+class converter:
     def __init__(self, ui, source, dest, revmapfile, opts):
         self.source = source
         self.dest = dest
         self.ui = ui
         self.opts = opts
         self.commitcache = {}
         self.authors = {}
 )
 from mercurial.utils import (
     dateutil,
     procutil,
     stringutil,
 )


-class logentry(object):
+class logentry:
     """Class logentry has the following attributes:
     .author - author name as CVS knows it
     .branch - name of branch this revision is on
     .branches - revision tuple of branches starting at this revision
     .comment - commit message
     .commitid - CVS commitid or None
     .date - the commit date as a (time, tz) tuple
     .dead - true if file revision is dead

...

             hint=_(b'check convert.cvsps.logencoding configuration'),
         )

     hook.hook(ui, None, b"cvslog", True, log=log)
     return log


-class changeset(object):
+class changeset:
     """Class changeset has the following attributes:
     .id - integer identifying this changeset (list index)
     .author - author name as CVS knows it
     .branch - name of branch this changeset is on, or None
     .comment - commit message
     .commitid - CVS commitid or None
     .date - the commit date as a (time,tz) tuple
     .entries - list of logentry objects in this changeset
 def normalize(path):
     """We use posixpath.normpath to support cross-platform path format.
     However, it doesn't handle None input. So we wrap it up."""
     if path is None:
         return None
     return posixpath.normpath(path)


-class filemapper(object):
+class filemapper:
     """Map and filter filenames when importing.

     A name can be mapped to itself, a new name, or None (omit from new
     repository)."""

     def __init__(self, ui, path=None):
         self.ui = ui
         self.include = {}
         self.exclude = {}
     error,
     pycompat,
     util,
 )
 from . import common


-class submodule(object):
+class submodule:
     def __init__(self, path, node, url):
         self.path = path
         self.node = node
         self.url = url

     def hgsub(self):
         return b"%s = [git]%s" % (self.path, self.url)

...

 from mercurial.utils import (
     dateutil,
     procutil,
 )
 from . import common


 class gnuarch_source(common.converter_source, common.commandline):
-    class gnuarch_rev(object):
+    class gnuarch_rev:
         def __init__(self, rev):
             self.rev = rev
             self.summary = b''
             self.date = None
             self.author = b''
             self.continuationof = None
             self.add_files = []
             self.mod_files = []
 def optrev(number):
     optrev = svn.core.svn_opt_revision_t()
     optrev.kind = svn.core.svn_opt_revision_number
     optrev.value.number = number
     return optrev


-class changedpath(object):
+class changedpath:
     def __init__(self, p):
         self.copyfrom_path = p.copyfrom_path
         self.copyfrom_rev = p.copyfrom_rev
         self.action = p.action


 def get_log_child(
     fp,

...

         raise error.Abort(
             _(b'debugsvnlog could not load Subversion python bindings')
         )

     args = decodeargs(ui.fin.read())
     get_log_child(ui.fout, *args)


-class logstream(object):
+class logstream:
     """Interruptible revision log iterator."""

     def __init__(self, stdout):
         self._stdout = stdout

     def __iter__(self):
         while True:
             try:
     return svn.core.svn_auth_open(providers, pool)


 class NotBranchError(SubversionException):
     pass


-class SvnRaTransport(object):
+class SvnRaTransport:
     """
     Open an ra connection to a Subversion repository.
     """

     def __init__(self, url=b"", ra=None):
         self.pool = Pool()
         self.svn_url = url
         self.username = b''

...

                     svn.core.SVN_ERR_BAD_URL,
                 ):
                     raise NotBranchError(url)
                 raise
         else:
             self.ra = ra
             svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))

-    class Reporter(object):
+    class Reporter:
         def __init__(self, reporter_data):
             self._reporter, self._baton = reporter_data

         def set_path(self, path, revnum, start_empty, lock_token, pool=None):
             svn.ra.reporter2_invoke_set_path(
                 self._reporter,
                 self._baton,
                 path,
     b'to-crlf': tocrlf,
     b'is-binary': isbinary,
     # The following provide backwards compatibility with win32text
     b'cleverencode:': tolf,
     b'cleverdecode:': tocrlf,
 }


-class eolfile(object):
+class eolfile:
     def __init__(self, ui, root, data):
         self._decode = {
             b'LF': b'to-lf',
             b'CRLF': b'to-crlf',
             b'BIN': b'is-binary',
         }
         self._encode = {
             b'LF': b'to-lf',
     option = opts.get(b'option')
     if not program:
         program = b'diff'
         option = option or [b'-Npru']
     cmdline = b' '.join(map(procutil.shellquote, [program] + option))
     return dodiff(ui, repo, cmdline, pats, opts)


-class savedcmd(object):
+class savedcmd:
     """use external program to diff repository (or selected files)

     Show differences between revisions for the specified files, using
     the following program::

         %(path)s

     When two revision arguments are given, then changes are shown
         sorted((k, getattr(diffopts, k)) for k in mdiff.diffopts.defaults)
     )
     return hex(hashutil.sha1(diffoptstr).digest())[:6]


 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)


-class annotateopts(object):
+class annotateopts:
     """like mercurial.mdiff.diffopts, but is for annotate

     followrename: follow renames, like "hg annotate -f"
     followmerge: follow p2 of a merge changeset, otherwise p2 is ignored
     """

     defaults = {
         b'diffopts': None,

...

         if diffopthash != _defaultdiffopthash:
             result += b'i' + diffopthash
         return result or b'default'


 defaultopts = annotateopts()


-class _annotatecontext(object):
+class _annotatecontext:
     """do not use this class directly as it does not use lock to protect
     writes. use "with annotatecontext(...)" instead.
     """

     def __init__(self, repo, path, linelogpath, revmappath, opts):
         self.repo = repo
         self.ui = repo.ui
         self.path = path

...

     """silent, best-effort unlink"""
     for path in paths:
         try:
             util.unlink(path)
         except OSError:
             pass


-class pathhelper(object):
+class pathhelper:
     """helper for getting paths for lockfile, linelog and revmap"""

     def __init__(self, repo, path, opts=defaultopts):
         # different options use different directories
         self._vfspath = os.path.join(
             b'fastannotate', opts.shortstr, encodedir(path)
         )
         self._repo = repo
     pycompat,
     templatefilters,
     util,
 )
 from mercurial.utils import dateutil

 # imitating mercurial.commands.annotate, not using the vanilla formatter since
 # the data structures are a bit different, and we have some fast paths.

-class defaultformatter(object):
+class defaultformatter:
     """the default formatter that does leftpad and support some common flags"""

     def __init__(self, ui, repo, opts):
         self.ui = ui
         self.opts = opts

         if ui.quiet:
             datefunc = dateutil.shortdate
 # whether the changeset changes the file path (ie. is a rename)
 renameflag = 2

 # len(mercurial.node.nullid)
 _hshlen = 20


-class revmap(object):
+class revmap:
     """trivial hg bin hash - linelog rev bidirectional map

     also stores a flag (uint8) for each revision, and track renames.
     """

     HEADER = b'REVMAP1\0'

     def __init__(self, path=None):
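The revmap docstring above describes a bidirectional map between 20-byte binary hashes and small integer linelog revisions, plus a per-revision flag. A hypothetical toy version of that idea (not the actual implementation) could be:

```python
class tinyrevmap:
    """Toy rev <-> hash map: a list for rev -> hash, a dict for hash -> rev."""

    def __init__(self):
        self._hashes = [None]  # index is the linelog rev; rev 0 is unused
        self._revs = {}        # binary hash -> linelog rev
        self._flags = [0]      # one small-int flag per rev

    def append(self, hsh, flag=0):
        self._hashes.append(hsh)
        self._flags.append(flag)
        self._revs[hsh] = len(self._hashes) - 1
        return self._revs[hsh]

    def rev2hsh(self, rev):
        return self._hashes[rev]

    def hsh2rev(self, hsh):
        return self._revs.get(hsh)

m = tinyrevmap()
rev = m.append(b'\x12' * 20, flag=2)  # 20-byte hash plus a flag bit
assert m.rev2hsh(rev) == b'\x12' * 20 and m.hsh2rev(b'\x12' * 20) == rev
```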
 )
 from . import (
     context,
     revmap,
 )


-class _lazyfctx(object):
+class _lazyfctx:
     """delegates to fctx but do not construct fctx when unnecessary"""

     def __init__(self, repo, node, path):
         self._node = node
         self._path = path
         self._repo = repo

     def node(self):
"""Returns the names of [fix] config options that have suboptions""" | """Returns the names of [fix] config options that have suboptions""" | ||||
names = set() | names = set() | ||||
for k, v in ui.configitems(b'fix'): | for k, v in ui.configitems(b'fix'): | ||||
if b':' in k: | if b':' in k: | ||||
names.add(k.split(b':', 1)[0]) | names.add(k.split(b':', 1)[0]) | ||||
return names | return names | ||||
class Fixer(object): | class Fixer: | ||||
"""Wraps the raw config values for a fixer with methods""" | """Wraps the raw config values for a fixer with methods""" | ||||
def __init__( | def __init__( | ||||
self, command, pattern, linerange, priority, metadata, skipclean | self, command, pattern, linerange, priority, metadata, skipclean | ||||
): | ): | ||||
self._command = command | self._command = command | ||||
self._pattern = pattern | self._pattern = pattern | ||||
self._linerange = linerange | self._linerange = linerange |
     )
     modified, added, removed, deleted, unknown, ignored, clean = rv2
     return scmutil.status(
         modified, added, removed, deleted, unknown, ignored, clean
     )


-class poststatus(object):
+class poststatus:
     def __init__(self, startclock):
         self._startclock = pycompat.sysbytes(startclock)

     def __call__(self, wctx, status):
         clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
         hashignore = _hashignore(wctx.repo().dirstate._ignore)
         notefiles = (
             status.modified

...

         return orig(source, link_name)
     finally:
         try:
             os.utime(os.path.dirname(link_name), None)
         except OSError:
             pass


-class state_update(object):
+class state_update:
     """This context manager is responsible for dispatching the state-enter
     and state-leave signals to the watchman service. The enter and leave
     methods can be invoked manually (for scenarios where context manager
     semantics are not possible). If parameters oldnode and newnode are None,
     they will be populated based on current working copy in enter and
     leave, respectively. Similarly, if the distance is none, it will be
     calculated based on the oldnode and newnode in the leave method."""

...

     """

     def __init__(self, msg, cmd=None):
         super(CommandError, self).__init__(
             "watchman command error: %s" % (msg,), cmd
         )


-class Transport(object):
+class Transport:
     """communication transport to the watchman server"""

     buf = None

     def close(self):
         """tear it down"""
         raise NotImplementedError()

...

         if b"\n" in b:
             result = b"".join(self.buf)
             (line, b) = b.split(b"\n", 1)
             self.buf = [b]
             return result + line
         self.buf.append(b)


-class Codec(object):
+class Codec:
     """communication encoding for the watchman server"""

     transport = None

     def __init__(self, transport):
         self.transport = transport

     def receive(self):

...

         # In Python 3, json.dumps is a transformation from objects possibly
         # containing Unicode strings to Unicode string. Even with (the default)
         # ensure_ascii=True, dumps returns a Unicode string.
         if compat.PYTHON3:
             cmd = cmd.encode("ascii")
         self.transport.write(cmd + b"\n")


-class client(object):
+class client:
     """Handles the communication with the watchman service"""

     sockpath = None
     transport = None
     sendCodec = None
     recvCodec = None
     sendConn = None
     recvConn = None
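The comment in the hunk above about json.dumps is easy to verify: on Python 3 it always returns str, so the command has to be encoded before it is handed to the byte-oriented transport. A small standalone check:

```python
import json

cmd = json.dumps(["version"])       # str on Python 3, even with ensure_ascii=True
assert isinstance(cmd, str)
wire = cmd.encode("ascii") + b"\n"  # the watchman transport deals in bytes
assert isinstance(wire, bytes)
```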
 def _buf_pos(buf, pos):
     ret = buf[pos]
     # Normalize the return type to bytes
     if compat.PYTHON3 and not isinstance(ret, bytes):
         ret = bytes((ret,))
     return ret


-class _bser_buffer(object):
+class _bser_buffer:
     def __init__(self, version):
         self.bser_version = version
         self.buf = ctypes.create_string_buffer(8192)
         if self.bser_version == 1:
             struct.pack_into(
                 tobytes(len(EMPTY_HEADER)) + b"s", self.buf, 0, EMPTY_HEADER
             )
             self.wpos = len(EMPTY_HEADER)

...

     struct.pack_into(b"=i", bser_buf.buf, 2, capabilities)
     struct.pack_into(b"=i", bser_buf.buf, 7, obj_len)
     return bser_buf.buf.raw[: bser_buf.wpos]


 # This is a quack-alike with the bserObjectType in bser.c
 # It provides by getattr accessors and getitem for both index
 # and name.
-class _BunserDict(object):
+class _BunserDict:
     __slots__ = ("_keys", "_values")

     def __init__(self, keys, values):
         self._keys = keys
         self._values = values

     def __getattr__(self, name):
         return self.__getitem__(name)

...

             return self._values[self._keys.index(key)]
         except ValueError:
             raise KeyError("_BunserDict has no key %s" % key)

     def __len__(self):
         return len(self._keys)


-class Bunser(object):
+class Bunser:
     def __init__(self, mutable=True, value_encoding=None, value_errors=None):
         self.mutable = mutable
         self.value_encoding = value_encoding

         if value_encoding is None:
             self.value_errors = None
         elif value_errors is None:
             self.value_errors = "strict"
     pathutil,
     util,
 )

 _version = 4
 _versionformat = b">I"


-class state(object):
+class state:
     def __init__(self, repo):
         self._vfs = repo.vfs
         self._ui = repo.ui
         self._rootdir = pathutil.normasprefix(repo.root)
         self._lastclock = None
         self._identity = util.filestat(None)

         self.mode = self._ui.config(b'fsmonitor', b'mode')
 class WatchmanNoRoot(Unavailable):
     def __init__(self, root, msg):
         self.root = root
         super(WatchmanNoRoot, self).__init__(msg)


-class client(object):
+class client:
     def __init__(self, ui, root, timeout=1.0):
         err = None
         if not self._user:
             err = b"couldn't get user"
             warn = True
         if self._user in ui.configlist(b'fsmonitor', b'blacklistusers'):
             err = b'user %s in blacklist' % self._user
             warn = False
b"log-index-cache-miss", | b"log-index-cache-miss", | ||||
default=False, | default=False, | ||||
) | ) | ||||
getversion = gitutil.pygit2_version | getversion = gitutil.pygit2_version | ||||
# TODO: extract an interface for this in core | # TODO: extract an interface for this in core | ||||
class gitstore(object): # store.basicstore): | class gitstore: # store.basicstore): | ||||
def __init__(self, path, vfstype): | def __init__(self, path, vfstype): | ||||
self.vfs = vfstype(path) | self.vfs = vfstype(path) | ||||
self.opener = self.vfs | self.opener = self.vfs | ||||
self.path = self.vfs.base | self.path = self.vfs.base | ||||
self.createmode = store._calcmode(self.vfs) | self.createmode = store._calcmode(self.vfs) | ||||
# above lines should go away in favor of: | # above lines should go away in favor of: | ||||
# super(gitstore, self).__init__(path, vfstype) | # super(gitstore, self).__init__(path, vfstype) | ||||
b'pygit2 library to be installed' | b'pygit2 library to be installed' | ||||
) | ) | ||||
) | ) | ||||
return gitstore(storebasepath, vfstype) | return gitstore(storebasepath, vfstype) | ||||
return orig(requirements, storebasepath, vfstype) | return orig(requirements, storebasepath, vfstype) | ||||
class gitfilestorage(object): | class gitfilestorage: | ||||
def file(self, path): | def file(self, path): | ||||
if path[0:1] == b'/': | if path[0:1] == b'/': | ||||
path = path[1:] | path = path[1:] | ||||
return gitlog.filelog(self.store.git, self.store._db, path) | return gitlog.filelog(self.store.git, self.store._db, path) | ||||
def _makefilestorage(orig, requirements, features, **kwargs): | def _makefilestorage(orig, requirements, features, **kwargs): | ||||
store = kwargs['store'] | store = kwargs['store'] | ||||
exclude.write(b'\n.hg\n') | exclude.write(b'\n.hg\n') | ||||
with open(os.path.join(dothg, b'requires'), 'wb') as f: | with open(os.path.join(dothg, b'requires'), 'wb') as f: | ||||
f.write(b'git\n') | f.write(b'git\n') | ||||
_BMS_PREFIX = 'refs/heads/' | _BMS_PREFIX = 'refs/heads/' | ||||
class gitbmstore(object): | class gitbmstore: | ||||
def __init__(self, gitrepo): | def __init__(self, gitrepo): | ||||
self.gitrepo = gitrepo | self.gitrepo = gitrepo | ||||
self._aclean = True | self._aclean = True | ||||
self._active = gitrepo.references['HEAD'] # git head, not mark | self._active = gitrepo.references['HEAD'] # git head, not mark | ||||
def __contains__(self, name): | def __contains__(self, name): | ||||
return ( | return ( | ||||
_BMS_PREFIX + pycompat.fsdecode(name) | _BMS_PREFIX + pycompat.fsdecode(name) |
     pygit2.GIT_STATUS_WT_RENAMED: b'a',
     pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
     pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
     pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
 }


 @interfaceutil.implementer(intdirstate.idirstate)
-class gitdirstate(object):
+class gitdirstate:
     def __init__(self, ui, root, gitrepo):
         self._ui = ui
         self._root = os.path.dirname(root)
         self.git = gitrepo
         self._plchangecallbacks = {}
         # TODO: context.poststatusfixup is bad and uses this attribute
         self._dirty = False

...

     gitutil,
     index,
     manifest as gitmanifest,
 )

 pygit2 = gitutil.get_pygit2()


-class baselog(object):  # revlog.revlog):
+class baselog:  # revlog.revlog):
     """Common implementations between changelog and manifestlog."""

     def __init__(self, gr, db):
         self.gitrepo = gr
         self._db = db

     def __len__(self):
         return int(

...

     def hasnode(self, n):
         t = self._db.execute(
             'SELECT node FROM changelog WHERE node = ?',
             (pycompat.sysstr(n),),
         ).fetchone()
         return t is not None


-class baselogindex(object):
+class baselogindex:
     def __init__(self, log):
         self._log = log

     def has_node(self, n):
         return self._log.rev(n) != -1

     def __len__(self):
         return len(self._log)
 )
 from . import gitutil


 pygit2 = gitutil.get_pygit2()


 @interfaceutil.implementer(repository.imanifestdict)
-class gittreemanifest(object):
+class gittreemanifest:
     """Expose git trees (and optionally a builder's overlay) as a manifestdict.

     Very similar to mercurial.manifest.treemanifest.
     """

     def __init__(self, git_repo, root_tree, pending_changes):
         """Initializer.

...

         # being clever about walking over the sets...
         baseline = set(self._walkonetree(self._tree, match, b''))
         deleted = {p for p, v in self._pending_changes.items() if v is None}
         pend = {p for p in self._pending_changes if match(p)}
         return iter(sorted((baseline | pend) - deleted))


 @interfaceutil.implementer(repository.imanifestrevisionstored)
-class gittreemanifestctx(object):
+class gittreemanifestctx:
     def __init__(self, repo, gittree):
         self._repo = repo
         self._tree = gittree

     def read(self):
         return gittreemanifest(self._repo, self._tree, None)

     def readfast(self, shallow=False):
         return self.read()

     def copy(self):
         # NB: it's important that we return a memgittreemanifestctx
         # because the caller expects a mutable manifest.
         return memgittreemanifestctx(self._repo, self._tree)

     def find(self, path):
         return self.read()[path]


 @interfaceutil.implementer(repository.imanifestrevisionwritable)
-class memgittreemanifestctx(object):
+class memgittreemanifestctx:
     def __init__(self, repo, tree):
         self._repo = repo
         self._tree = tree
         # dict of path: Optional[Tuple(node, flags)]
         self._pending_changes = {}

     def read(self):
         return gittreemanifest(self._repo, self._tree, self._pending_changes)
(k, convert(v)) if isinstance(v, bytes) else (k, v) | (k, convert(v)) if isinstance(v, bytes) else (k, v) | ||||
for k, v in opts.items() | for k, v in opts.items() | ||||
] | ] | ||||
) | ) | ||||
return args, opts | return args, opts | ||||
class Command(object): | class Command: | ||||
def __init__(self, name): | def __init__(self, name): | ||||
self.name = name | self.name = name | ||||
self.args = [] | self.args = [] | ||||
self.opts = {} | self.opts = {} | ||||
def __bytes__(self): | def __bytes__(self): | ||||
cmd = b"hg " + self.name | cmd = b"hg " + self.name | ||||
if self.opts: | if self.opts: | ||||
def __setitem__(self, key, value): | def __setitem__(self, key, value): | ||||
values = self.opts.setdefault(key, []) | values = self.opts.setdefault(key, []) | ||||
values.append(value) | values.append(value) | ||||
def __and__(self, other): | def __and__(self, other): | ||||
return AndCommand(self, other) | return AndCommand(self, other) | ||||
class AndCommand(object): | class AndCommand: | ||||
def __init__(self, left, right): | def __init__(self, left, right): | ||||
self.left = left | self.left = left | ||||
self.right = right | self.right = right | ||||
def __str__(self): | def __str__(self): | ||||
return b"%s && %s" % (self.left, self.right) | return b"%s && %s" % (self.left, self.right) | ||||
def __and__(self, other): | def __and__(self, other): |
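A hedged usage sketch of the builder pattern above; the bodies of __bytes__ and AndCommand.__and__ are truncated in this hunk, so the exact rendering is inferred from what is visible rather than quoted:

    log = Command(b'log')
    log[b'-r'] = b'tip'                  # __setitem__ appends to a per-option list
    combined = log & Command(b'status')  # Command.__and__ -> AndCommand
    # AndCommand joins the byte renderings of both sides with b" && ".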
# Custom help category | # Custom help category | ||||
_HELP_CATEGORY = b'gpg' | _HELP_CATEGORY = b'gpg' | ||||
help.CATEGORY_ORDER.insert( | help.CATEGORY_ORDER.insert( | ||||
help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP), _HELP_CATEGORY | help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP), _HELP_CATEGORY | ||||
) | ) | ||||
help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)' | help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)' | ||||
class gpg(object): | class gpg: | ||||
def __init__(self, path, key=None): | def __init__(self, path, key=None): | ||||
self.path = path | self.path = path | ||||
self.key = (key and b" --local-user \"%s\"" % key) or b"" | self.key = (key and b" --local-user \"%s\"" % key) or b"" | ||||
def sign(self, data): | def sign(self, data): | ||||
gpgcmd = b"%s --sign --detach-sign%s" % (self.path, self.key) | gpgcmd = b"%s --sign --detach-sign%s" % (self.path, self.key) | ||||
return procutil.filter(data, gpgcmd) | return procutil.filter(data, gpgcmd) | ||||
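A hedged usage sketch of the wrapper above; the binary path and key id are invented for illustration:

    # sign() pipes the data through "gpg --sign --detach-sign --local-user ..."
    signer = gpg(b'/usr/bin/gpg', key=b'0xDEADBEEF')
    detached_sig = signer.sign(b'payload bytes')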
b"will DISCARD it from the edited history!" | b"will DISCARD it from the edited history!" | ||||
) | ) | ||||
lines = (intro % (first, last)).split(b'\n') + actions + hints | lines = (intro % (first, last)).split(b'\n') + actions + hints | ||||
return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines]) | return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines]) | ||||
class histeditstate(object): | class histeditstate: | ||||
def __init__(self, repo): | def __init__(self, repo): | ||||
self.repo = repo | self.repo = repo | ||||
self.actions = None | self.actions = None | ||||
self.keep = None | self.keep = None | ||||
self.topmost = None | self.topmost = None | ||||
self.parentctxnode = None | self.parentctxnode = None | ||||
self.lock = None | self.lock = None | ||||
self.wlock = None | self.wlock = None | ||||
def clear(self): | def clear(self): | ||||
if self.inprogress(): | if self.inprogress(): | ||||
self.repo.vfs.unlink(b'histedit-state') | self.repo.vfs.unlink(b'histedit-state') | ||||
def inprogress(self): | def inprogress(self): | ||||
return self.repo.vfs.exists(b'histedit-state') | return self.repo.vfs.exists(b'histedit-state') | ||||
class histeditaction(object): | class histeditaction: | ||||
def __init__(self, state, node): | def __init__(self, state, node): | ||||
self.state = state | self.state = state | ||||
self.repo = state.repo | self.repo = state.repo | ||||
self.node = node | self.node = node | ||||
@classmethod | @classmethod | ||||
def fromrule(cls, state, rule): | def fromrule(cls, state, rule): | ||||
"""Parses the given rule, returning an instance of the histeditaction.""" | """Parses the given rule, returning an instance of the histeditaction.""" | ||||
MODE_HELP: {}, | MODE_HELP: {}, | ||||
} | } | ||||
def screen_size(): | def screen_size(): | ||||
return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' ')) | return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' ')) | ||||
class histeditrule(object): | class histeditrule: | ||||
def __init__(self, ui, ctx, pos, action=b'pick'): | def __init__(self, ui, ctx, pos, action=b'pick'): | ||||
self.ui = ui | self.ui = ui | ||||
self.ctx = ctx | self.ctx = ctx | ||||
self.action = action | self.action = action | ||||
self.origpos = pos | self.origpos = pos | ||||
self.pos = pos | self.pos = pos | ||||
self.conflicts = [] | self.conflicts = [] | ||||
def _trunc_tail(line, n): | def _trunc_tail(line, n): | ||||
if len(line) <= n: | if len(line) <= n: | ||||
return line | return line | ||||
return line[: n - 2] + b' >' | return line[: n - 2] + b' >' | ||||
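For example, with the helper above:

    _trunc_tail(b'a long description line', 10)   # -> b'a long d >' (8 chars kept, then b' >')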
class _chistedit_state(object): | class _chistedit_state: | ||||
def __init__( | def __init__( | ||||
self, | self, | ||||
repo, | repo, | ||||
rules, | rules, | ||||
stdscr, | stdscr, | ||||
): | ): | ||||
self.repo = repo | self.repo = repo | ||||
self.rules = rules | self.rules = rules |
if common.isremotebooksenabled(ui): | if common.isremotebooksenabled(ui): | ||||
hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/' | hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/' | ||||
if remotebookmark.startswith(hoist): | if remotebookmark.startswith(hoist): | ||||
return remotebookmark[len(hoist) :] | return remotebookmark[len(hoist) :] | ||||
return remotebookmark | return remotebookmark | ||||
class bundlestore(object): | class bundlestore: | ||||
def __init__(self, repo): | def __init__(self, repo): | ||||
self._repo = repo | self._repo = repo | ||||
storetype = self._repo.ui.config(b'infinitepush', b'storetype') | storetype = self._repo.ui.config(b'infinitepush', b'storetype') | ||||
if storetype == b'disk': | if storetype == b'disk': | ||||
from . import store | from . import store | ||||
self.store = store.filebundlestore(self._repo.ui, self._repo) | self.store = store.filebundlestore(self._repo.ui, self._repo) | ||||
elif storetype == b'external': | elif storetype == b'external': |
try: | try: | ||||
lfsmod = extensions.find(b'lfs') | lfsmod = extensions.find(b'lfs') | ||||
lfsmod.wrapper.uploadblobsfromrevs(repo, missing) | lfsmod.wrapper.uploadblobsfromrevs(repo, missing) | ||||
except KeyError: | except KeyError: | ||||
# Ignore if lfs extension is not enabled | # Ignore if lfs extension is not enabled | ||||
return | return | ||||
class copiedpart(object): | class copiedpart: | ||||
"""a copy of unbundlepart content that can be consumed later""" | """a copy of unbundlepart content that can be consumed later""" | ||||
def __init__(self, part): | def __init__(self, part): | ||||
# copy "public properties" | # copy "public properties" | ||||
self.type = part.type | self.type = part.type | ||||
self.id = part.id | self.id = part.id | ||||
self.mandatory = part.mandatory | self.mandatory = part.mandatory | ||||
self.mandatoryparams = part.mandatoryparams | self.mandatoryparams = part.mandatoryparams |
# Infinite push | # Infinite push | ||||
# | # | ||||
# Copyright 2016 Facebook, Inc. | # Copyright 2016 Facebook, Inc. | ||||
# | # | ||||
# This software may be used and distributed according to the terms of the | # This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | # GNU General Public License version 2 or any later version. | ||||
class indexapi(object): | class indexapi: | ||||
"""Class that manages access to infinitepush index. | """Class that manages access to infinitepush index. | ||||
This class is a context manager and all write operations (like | This class is a context manager and all write operations (like | ||||
deletebookmarks, addbookmark etc) should use `with` statement: | deletebookmarks, addbookmark etc) should use `with` statement: | ||||
with index: | with index: | ||||
index.deletebookmarks(...) | index.deletebookmarks(...) | ||||
... | ... |
class BundleWriteException(Exception): | class BundleWriteException(Exception): | ||||
pass | pass | ||||
class BundleReadException(Exception): | class BundleReadException(Exception): | ||||
pass | pass | ||||
class abstractbundlestore(object): # pytype: disable=ignored-metaclass | class abstractbundlestore: # pytype: disable=ignored-metaclass | ||||
"""Defines the interface for bundle stores. | """Defines the interface for bundle stores. | ||||
A bundle store is an entity that stores raw bundle data. It is a simple | A bundle store is an entity that stores raw bundle data. It is a simple | ||||
key-value store. However, the keys are chosen by the store. The keys can | key-value store. However, the keys are chosen by the store. The keys can | ||||
be any Python object understood by the corresponding bundle index (see | be any Python object understood by the corresponding bundle index (see | ||||
``abstractbundleindex`` below). | ``abstractbundleindex`` below). | ||||
""" | """ | ||||
Returns None if the bundle isn't known. | Returns None if the bundle isn't known. | ||||
Throws BundleReadException | Throws BundleReadException | ||||
The returned object should be a file object supporting read() | The returned object should be a file object supporting read() | ||||
and close(). | and close(). | ||||
""" | """ | ||||
class filebundlestore(object): | class filebundlestore: | ||||
"""bundle store in filesystem | """bundle store in filesystem | ||||
meant for storing bundles somewhere on disk and on network filesystems | meant for storing bundles somewhere on disk and on network filesystems | ||||
""" | """ | ||||
def __init__(self, ui, repo): | def __init__(self, ui, repo): | ||||
self.ui = ui | self.ui = ui | ||||
self.repo = repo | self.repo = repo |
oldhashes, | oldhashes, | ||||
newhashes, | newhashes, | ||||
) | ) | ||||
) | ) | ||||
__str__ = encoding.strmethod(__bytes__) | __str__ = encoding.strmethod(__bytes__) | ||||
class journalstorage(object): | class journalstorage: | ||||
"""Storage for journal entries | """Storage for journal entries | ||||
Entries are divided over two files; one with entries that pertain to the | Entries are divided over two files; one with entries that pertain to the | ||||
local working copy *only*, and one with entries that are shared across | local working copy *only*, and one with entries that are shared across | ||||
multiple working copies when shared using the share extension. | multiple working copies when shared using the share extension. | ||||
Entries are stored with NUL bytes as separators. See the journalentry | Entries are stored with NUL bytes as separators. See the journalentry | ||||
class for the per-entry structure. | class for the per-entry structure. |
"""Retrieves modified and added files from a working directory state | """Retrieves modified and added files from a working directory state | ||||
and returns the subset of each contained in given changed files | and returns the subset of each contained in given changed files | ||||
retrieved from a change context.""" | retrieved from a change context.""" | ||||
modified = [f for f in wstatus.modified if f in changed] | modified = [f for f in wstatus.modified if f in changed] | ||||
added = [f for f in wstatus.added if f in changed] | added = [f for f in wstatus.added if f in changed] | ||||
return modified, added | return modified, added | ||||
class kwtemplater(object): | class kwtemplater: | ||||
""" | """ | ||||
Sets up keyword templates, corresponding keyword regex, and | Sets up keyword templates, corresponding keyword regex, and | ||||
provides keyword substitution functions. | provides keyword substitution functions. | ||||
""" | """ | ||||
def __init__(self, ui, repo, inc, exc): | def __init__(self, ui, repo, inc, exc): | ||||
self.ui = ui | self.ui = ui | ||||
self._repo = weakref.ref(repo) | self._repo = weakref.ref(repo) |
self.filename, | self.filename, | ||||
self.detail, | self.detail, | ||||
) | ) | ||||
def __str__(self): | def __str__(self): | ||||
return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail) | return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail) | ||||
class basestore(object): | class basestore: | ||||
def __init__(self, ui, repo, url): | def __init__(self, ui, repo, url): | ||||
self.ui = ui | self.ui = ui | ||||
self.repo = repo | self.repo = repo | ||||
self.url = url | self.url = url | ||||
def put(self, source, hash): | def put(self, source, hash): | ||||
'''Put source file into the store so it can be retrieved by hash.''' | '''Put source file into the store so it can be retrieved by hash.''' | ||||
raise NotImplementedError(b'abstract method') | raise NotImplementedError(b'abstract method') |
else: | else: | ||||
return f in standins | return f in standins | ||||
match.matchfn = matchfn | match.matchfn = matchfn | ||||
return match | return match | ||||
class automatedcommithook(object): | class automatedcommithook: | ||||
"""Stateful hook to update standins at the 1st commit of resuming | """Stateful hook to update standins at the 1st commit of resuming | ||||
For efficiency, updating standins in the working directory should | For efficiency, updating standins in the working directory should | ||||
be avoided while automated committing (like rebase, transplant and | be avoided while automated committing (like rebase, transplant and | ||||
so on), because they should be updated before committing. | so on), because they should be updated before committing. | ||||
But the 1st commit of resuming automated committing (e.g. ``rebase | But the 1st commit of resuming automated committing (e.g. ``rebase | ||||
--continue``) should update them, because largefiles may be | --continue``) should update them, because largefiles may be |
@eh.wrapcommand( | @eh.wrapcommand( | ||||
b'debugstate', | b'debugstate', | ||||
opts=[(b'', b'large', None, _(b'display largefiles dirstate'))], | opts=[(b'', b'large', None, _(b'display largefiles dirstate'))], | ||||
) | ) | ||||
def overridedebugstate(orig, ui, repo, *pats, **opts): | def overridedebugstate(orig, ui, repo, *pats, **opts): | ||||
large = opts.pop('large', False) | large = opts.pop('large', False) | ||||
if large: | if large: | ||||
class fakerepo(object): | class fakerepo: | ||||
dirstate = lfutil.openlfdirstate(ui, repo) | dirstate = lfutil.openlfdirstate(ui, repo) | ||||
orig(ui, fakerepo, *pats, **opts) | orig(ui, fakerepo, *pats, **opts) | ||||
else: | else: | ||||
orig(ui, repo, *pats, **opts) | orig(ui, repo, *pats, **opts) | ||||
# Before starting the manifest merge, merge.updates will call | # Before starting the manifest merge, merge.updates will call |
def __init__(self, ui, filename): | def __init__(self, ui, filename): | ||||
super(lfsuploadfile, self).__init__(ui, filename, b'rb') | super(lfsuploadfile, self).__init__(ui, filename, b'rb') | ||||
self.read = self._data.read | self.read = self._data.read | ||||
def _makeprogress(self): | def _makeprogress(self): | ||||
return None # progress is handled by the worker client | return None # progress is handled by the worker client | ||||
class local(object): | class local: | ||||
"""Local blobstore for large file contents. | """Local blobstore for large file contents. | ||||
This blobstore is used both as a cache and as a staging area for large blobs | This blobstore is used both as a cache and as a staging area for large blobs | ||||
to be uploaded to the remote blobstore. | to be uploaded to the remote blobstore. | ||||
""" | """ | ||||
def __init__(self, repo): | def __init__(self, repo): | ||||
fullpath = repo.svfs.join(b'lfs/objects') | fullpath = repo.svfs.join(b'lfs/objects') | ||||
code, | code, | ||||
encoding.strfromlocal(msg), | encoding.strfromlocal(msg), | ||||
headers, | headers, | ||||
fp, | fp, | ||||
) | ) | ||||
return None | return None | ||||
class _gitlfsremote(object): | class _gitlfsremote: | ||||
def __init__(self, repo, url): | def __init__(self, repo, url): | ||||
ui = repo.ui | ui = repo.ui | ||||
self.ui = ui | self.ui = ui | ||||
baseurl, authinfo = url.authinfo() | baseurl, authinfo = url.authinfo() | ||||
self.baseurl = baseurl.rstrip(b'/') | self.baseurl = baseurl.rstrip(b'/') | ||||
useragent = repo.ui.config(b'experimental', b'lfs.user-agent') | useragent = repo.ui.config(b'experimental', b'lfs.user-agent') | ||||
if not useragent: | if not useragent: | ||||
useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version() | useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version() | ||||
# copied from mercurial/httppeer.py | # copied from mercurial/httppeer.py | ||||
urlopener = getattr(self, 'urlopener', None) | urlopener = getattr(self, 'urlopener', None) | ||||
if urlopener: | if urlopener: | ||||
for h in urlopener.handlers: | for h in urlopener.handlers: | ||||
h.close() | h.close() | ||||
getattr(h, "close_all", lambda: None)() | getattr(h, "close_all", lambda: None)() | ||||
class _dummyremote(object): | class _dummyremote: | ||||
"""Dummy store storing blobs to temp directory.""" | """Dummy store storing blobs to temp directory.""" | ||||
def __init__(self, repo, url): | def __init__(self, repo, url): | ||||
fullpath = repo.vfs.join(b'lfs', url.path) | fullpath = repo.vfs.join(b'lfs', url.path) | ||||
self.vfs = lfsvfs(fullpath) | self.vfs = lfsvfs(fullpath) | ||||
def writebatch(self, pointers, fromstore): | def writebatch(self, pointers, fromstore): | ||||
for p in _deduplicate(pointers): | for p in _deduplicate(pointers): | ||||
content = fromstore.read(p.oid(), verify=True) | content = fromstore.read(p.oid(), verify=True) | ||||
with self.vfs(p.oid(), b'wb', atomictemp=True) as fp: | with self.vfs(p.oid(), b'wb', atomictemp=True) as fp: | ||||
fp.write(content) | fp.write(content) | ||||
def readbatch(self, pointers, tostore): | def readbatch(self, pointers, tostore): | ||||
for p in _deduplicate(pointers): | for p in _deduplicate(pointers): | ||||
with self.vfs(p.oid(), b'rb') as fp: | with self.vfs(p.oid(), b'rb') as fp: | ||||
tostore.download(p.oid(), fp, None) | tostore.download(p.oid(), fp, None) | ||||
class _nullremote(object): | class _nullremote: | ||||
"""Null store storing blobs to /dev/null.""" | """Null store storing blobs to /dev/null.""" | ||||
def __init__(self, repo, url): | def __init__(self, repo, url): | ||||
pass | pass | ||||
def writebatch(self, pointers, fromstore): | def writebatch(self, pointers, fromstore): | ||||
pass | pass | ||||
def readbatch(self, pointers, tostore): | def readbatch(self, pointers, tostore): | ||||
pass | pass | ||||
class _promptremote(object): | class _promptremote: | ||||
"""Prompt user to set lfs.url when accessed.""" | """Prompt user to set lfs.url when accessed.""" | ||||
def __init__(self, repo, url): | def __init__(self, repo, url): | ||||
pass | pass | ||||
def writebatch(self, pointers, fromstore, ui=None): | def writebatch(self, pointers, fromstore, ui=None): | ||||
self._prompt() | self._prompt() | ||||
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | ||||
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | ||||
# be specifying the version(s) of Mercurial they are tested with, or | # be specifying the version(s) of Mercurial they are tested with, or | ||||
# leave the attribute unspecified. | # leave the attribute unspecified. | ||||
testedwith = b'ships-with-hg-core' | testedwith = b'ships-with-hg-core' | ||||
class processlogger(object): | class processlogger: | ||||
"""Map log events to external commands | """Map log events to external commands | ||||
Arguments are passed on as environment variables. | Arguments are passed on as environment variables. | ||||
""" | """ | ||||
def __init__(self, ui): | def __init__(self, ui): | ||||
self._scripts = dict(ui.configitems(b'logtoprocess')) | self._scripts = dict(ui.configitems(b'logtoprocess')) | ||||
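A hedged example of wiring this up in a configuration file; the event name and script path are illustrative only, and the exact environment variables handed to the script are documented in the extension's help rather than assumed here:

    [logtoprocess]
    commandexception = /usr/local/bin/report-hg-crash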
) | ) | ||||
# force load strip extension formerly included in mq and import some utility | # force load strip extension formerly included in mq and import some utility | ||||
try: | try: | ||||
extensions.find(b'strip') | extensions.find(b'strip') | ||||
except KeyError: | except KeyError: | ||||
# note: load is lazy so we could avoid the try-except, | # note: load is lazy so we could avoid the try-except, | ||||
# but I (marmoute) prefer this explicit code. | # but I (marmoute) prefer this explicit code. | ||||
class dummyui(object): | class dummyui: | ||||
def debug(self, msg): | def debug(self, msg): | ||||
pass | pass | ||||
def log(self, event, msgfmt, *msgargs, **opts): | def log(self, event, msgfmt, *msgargs, **opts): | ||||
pass | pass | ||||
extensions.load(dummyui(), b'strip', b'') | extensions.load(dummyui(), b'strip', b'') | ||||
return inclsubs | return inclsubs | ||||
# Patch names look like unix-file names. | # Patch names look like unix-file names. | ||||
# They must be joinable with queue directory and result in the patch path. | # They must be joinable with queue directory and result in the patch path. | ||||
normname = util.normpath | normname = util.normpath | ||||
class statusentry(object): | class statusentry: | ||||
def __init__(self, node, name): | def __init__(self, node, name): | ||||
self.node, self.name = node, name | self.node, self.name = node, name | ||||
def __bytes__(self): | def __bytes__(self): | ||||
return hex(self.node) + b':' + self.name | return hex(self.node) + b':' + self.name | ||||
__str__ = encoding.strmethod(__bytes__) | __str__ = encoding.strmethod(__bytes__) | ||||
__repr__ = encoding.strmethod(__bytes__) | __repr__ = encoding.strmethod(__bytes__) | ||||
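As a hedged illustration, __bytes__ above serializes an entry as the 40-character hex node, a colon, and the patch name; the values below are made up:

    line = b'0123456789abcdef0123456789abcdef01234567:fix-encoding.patch'
    node_hex, name = line.split(b':', 1)   # inverse of what __bytes__ produces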
lines.insert(i, b'') | lines.insert(i, b'') | ||||
if i < bestpos: | if i < bestpos: | ||||
bestpos = i | bestpos = i | ||||
break | break | ||||
lines.insert(bestpos, b'%s: %s' % (header, value)) | lines.insert(bestpos, b'%s: %s' % (header, value)) | ||||
return lines | return lines | ||||
class patchheader(object): | class patchheader: | ||||
def __init__(self, pf, plainmode=False): | def __init__(self, pf, plainmode=False): | ||||
def eatdiff(lines): | def eatdiff(lines): | ||||
while lines: | while lines: | ||||
l = lines[-1] | l = lines[-1] | ||||
if ( | if ( | ||||
l.startswith(b"diff -") | l.startswith(b"diff -") | ||||
or l.startswith(b"Index:") | or l.startswith(b"Index:") | ||||
or l.startswith(b"===========") | or l.startswith(b"===========") | ||||
repo.ui.setconfig(b'ui', b'allowemptycommit', True) | repo.ui.setconfig(b'ui', b'allowemptycommit', True) | ||||
return repo.commit(*args, **kwargs) | return repo.commit(*args, **kwargs) | ||||
class AbortNoCleanup(error.Abort): | class AbortNoCleanup(error.Abort): | ||||
pass | pass | ||||
class queue(object): | class queue: | ||||
def __init__(self, ui, baseui, path, patchdir=None): | def __init__(self, ui, baseui, path, patchdir=None): | ||||
self.basepath = path | self.basepath = path | ||||
try: | try: | ||||
with open(os.path.join(path, b'patches.queue'), 'rb') as fh: | with open(os.path.join(path, b'patches.queue'), 'rb') as fh: | ||||
cur = fh.read().rstrip() | cur = fh.read().rstrip() | ||||
if not cur: | if not cur: | ||||
curpath = os.path.join(path, b'patches') | curpath = os.path.join(path, b'patches') |
summary: {desc|firstline} | summary: {desc|firstline} | ||||
''' | ''' | ||||
deftemplates = { | deftemplates = { | ||||
b'changegroup': multiple_template, | b'changegroup': multiple_template, | ||||
} | } | ||||
class notifier(object): | class notifier: | ||||
'''email notification class.''' | '''email notification class.''' | ||||
def __init__(self, ui, repo, hooktype): | def __init__(self, ui, repo, hooktype): | ||||
self.ui = ui | self.ui = ui | ||||
cfg = self.ui.config(b'notify', b'config') | cfg = self.ui.config(b'notify', b'config') | ||||
if cfg: | if cfg: | ||||
self.ui.readconfig(cfg, sections=[b'usersubs', b'reposubs']) | self.ui.readconfig(cfg, sections=[b'usersubs', b'reposubs']) | ||||
self.repo = repo | self.repo = repo |
output = util.stringio() | output = util.stringio() | ||||
for chunk, _label in patch.diffui( | for chunk, _label in patch.diffui( | ||||
ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts | ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts | ||||
): | ): | ||||
output.write(chunk) | output.write(chunk) | ||||
return output.getvalue() | return output.getvalue() | ||||
class DiffChangeType(object): | class DiffChangeType: | ||||
ADD = 1 | ADD = 1 | ||||
CHANGE = 2 | CHANGE = 2 | ||||
DELETE = 3 | DELETE = 3 | ||||
MOVE_AWAY = 4 | MOVE_AWAY = 4 | ||||
COPY_AWAY = 5 | COPY_AWAY = 5 | ||||
MOVE_HERE = 6 | MOVE_HERE = 6 | ||||
COPY_HERE = 7 | COPY_HERE = 7 | ||||
MULTICOPY = 8 | MULTICOPY = 8 | ||||
class DiffFileType(object): | class DiffFileType: | ||||
TEXT = 1 | TEXT = 1 | ||||
IMAGE = 2 | IMAGE = 2 | ||||
BINARY = 3 | BINARY = 3 | ||||
@attr.s | @attr.s | ||||
class phabhunk(dict): | class phabhunk(dict): | ||||
"""Represents a Differential hunk, which is owned by a Differential change""" | """Represents a Differential hunk, which is owned by a Differential change""" | ||||
oldOffset = attr.ib(default=0) # camelcase-required | oldOffset = attr.ib(default=0) # camelcase-required | ||||
oldLength = attr.ib(default=0) # camelcase-required | oldLength = attr.ib(default=0) # camelcase-required | ||||
newOffset = attr.ib(default=0) # camelcase-required | newOffset = attr.ib(default=0) # camelcase-required | ||||
newLength = attr.ib(default=0) # camelcase-required | newLength = attr.ib(default=0) # camelcase-required | ||||
corpus = attr.ib(default='') | corpus = attr.ib(default='') | ||||
# These get added to the phabchange's equivalents | # These get added to the phabchange's equivalents | ||||
addLines = attr.ib(default=0) # camelcase-required | addLines = attr.ib(default=0) # camelcase-required | ||||
delLines = attr.ib(default=0) # camelcase-required | delLines = attr.ib(default=0) # camelcase-required | ||||
@attr.s | @attr.s | ||||
class phabchange(object): | class phabchange: | ||||
"""Represents a Differential change, owns Differential hunks and owned by a | """Represents a Differential change, owns Differential hunks and owned by a | ||||
Differential diff. Each one represents one file in a diff. | Differential diff. Each one represents one file in a diff. | ||||
""" | """ | ||||
currentPath = attr.ib(default=None) # camelcase-required | currentPath = attr.ib(default=None) # camelcase-required | ||||
oldPath = attr.ib(default=None) # camelcase-required | oldPath = attr.ib(default=None) # camelcase-required | ||||
awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required | awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required | ||||
metadata = attr.ib(default=attr.Factory(dict)) | metadata = attr.ib(default=attr.Factory(dict)) | ||||
# It's useful to include these stats since the Phab web UI shows them, | # It's useful to include these stats since the Phab web UI shows them, | ||||
# and uses them to estimate how large a change a Revision is. Also used | # and uses them to estimate how large a change a Revision is. Also used | ||||
# in email subjects for the [+++--] bit. | # in email subjects for the [+++--] bit. | ||||
self.addLines += hunk.addLines | self.addLines += hunk.addLines | ||||
self.delLines += hunk.delLines | self.delLines += hunk.delLines | ||||
@attr.s | @attr.s | ||||
class phabdiff(object): | class phabdiff: | ||||
"""Represents a Differential diff, owns Differential changes. Corresponds | """Represents a Differential diff, owns Differential changes. Corresponds | ||||
to a commit. | to a commit. | ||||
""" | """ | ||||
# Doesn't seem to be any reason to send this (output of uname -n) | # Doesn't seem to be any reason to send this (output of uname -n) | ||||
sourceMachine = attr.ib(default=b'') # camelcase-required | sourceMachine = attr.ib(default=b'') # camelcase-required | ||||
sourcePath = attr.ib(default=b'/') # camelcase-required | sourcePath = attr.ib(default=b'/') # camelcase-required | ||||
sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required | sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required |
def _ctxdesc(ctx): | def _ctxdesc(ctx): | ||||
"""short description for a context""" | """short description for a context""" | ||||
return cmdutil.format_changeset_summary( | return cmdutil.format_changeset_summary( | ||||
ctx.repo().ui, ctx, command=b'rebase' | ctx.repo().ui, ctx, command=b'rebase' | ||||
) | ) | ||||
class rebaseruntime(object): | class rebaseruntime: | ||||
"""This class is a container for rebase runtime state""" | """This class is a container for rebase runtime state""" | ||||
def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None): | def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None): | ||||
if opts is None: | if opts is None: | ||||
opts = {} | opts = {} | ||||
# prepared: whether we have rebasestate prepared or not. Currently it | # prepared: whether we have rebasestate prepared or not. Currently it | ||||
# decides whether "self.repo" is unfiltered or not. | # decides whether "self.repo" is unfiltered or not. |
] | ] | ||||
RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$') | RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$') | ||||
RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b' | RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b' | ||||
BULLET_SECTION = _(b'Other Changes') | BULLET_SECTION = _(b'Other Changes') | ||||
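A quick self-contained check of what RE_DIRECTIVE matches; the directive text is made up:

    import re

    RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
    m = RE_DIRECTIVE.match(b'.. feature:: Added --widget to hg frobnicate')
    assert m is not None and m.group(1) == b'feature'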
class parsedreleasenotes(object): | class parsedreleasenotes: | ||||
def __init__(self): | def __init__(self): | ||||
self.sections = {} | self.sections = {} | ||||
def __contains__(self, section): | def __contains__(self, section): | ||||
return section in self.sections | return section in self.sections | ||||
def __iter__(self): | def __iter__(self): | ||||
return iter(sorted(self.sections)) | return iter(sorted(self.sections)) | ||||
continue | continue | ||||
if similar(ui, existingnotes, incoming_str): | if similar(ui, existingnotes, incoming_str): | ||||
continue | continue | ||||
self.addnontitleditem(section, paragraphs) | self.addnontitleditem(section, paragraphs) | ||||
class releasenotessections(object): | class releasenotessections: | ||||
def __init__(self, ui, repo=None): | def __init__(self, ui, repo=None): | ||||
if repo: | if repo: | ||||
sections = util.sortdict(DEFAULT_SECTIONS) | sections = util.sortdict(DEFAULT_SECTIONS) | ||||
custom_sections = getcustomadmonitions(repo) | custom_sections = getcustomadmonitions(repo) | ||||
if custom_sections: | if custom_sections: | ||||
sections.update(custom_sections) | sections.update(custom_sections) | ||||
self._sections = list(sections.items()) | self._sections = list(sections.items()) | ||||
else: | else: |
# With glibc 2.7+ the 'e' flag uses O_CLOEXEC when opening. | # With glibc 2.7+ the 'e' flag uses O_CLOEXEC when opening. | ||||
# The 'e' flag will be ignored on older versions of glibc. | # The 'e' flag will be ignored on older versions of glibc. | ||||
# Python 3 can't handle the 'e' flag. | # Python 3 can't handle the 'e' flag. | ||||
PACKOPENMODE = b'rbe' | PACKOPENMODE = b'rbe' | ||||
else: | else: | ||||
PACKOPENMODE = b'rb' | PACKOPENMODE = b'rb' | ||||
Here's a py2 compat fix in case you want to follow up.
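A minimal sketch of what that follow-up might look like, assuming the elided guard above exists only for Python 2 (per the comment, Python 3's open() can't handle the glibc-only 'e' flag), so the conditional collapses to:

    PACKOPENMODE = b'rb'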