pycompat.itervalues(x) just calls x.values(), so the two are equivalent.
The rewrite was performed via an automated search and replace.
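For context on the equivalence claim: on Python 3, Mercurial's `pycompat.itervalues` is little more than a pass-through to `dict.values()`. A minimal sketch of the idea (the real `pycompat` definition may differ in detail):

```python
# Rough sketch of what pycompat.itervalues amounts to on Python 3; shown only
# to illustrate why the mechanical rewrite below is behavior-preserving.
def itervalues(d):
    return d.values()


counts = {b'a': 1, b'b': 2}
# Both spellings yield the same values view, so callers are unaffected.
assert list(itervalues(counts)) == list(counts.values())
```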
Reviewers: martinvonz, Alphare, hg-reviewers
| Change | Path |
|---|---|
| M | contrib/synthrepo.py (2 lines) |
| M | hgext/journal.py (2 lines) |
| M | hgext/rebase.py (2 lines) |
| M | hgext/remotefilelog/connectionpool.py (3 lines) |
| M | hgext/remotefilelog/debugcommands.py (2 lines) |
| M | hgext/remotefilelog/repack.py (4 lines) |
| M | hgext/transplant.py (2 lines) |
| M | mercurial/branchmap.py (4 lines) |
| M | mercurial/exchange.py (2 lines) |
| M | mercurial/localrepo.py (2 lines) |
| M | mercurial/mergestate.py (3 lines) |
| M | mercurial/patch.py (6 lines) |
| M | mercurial/statprof.py (10 lines) |
| M | mercurial/ui.py (4 lines) |
| M | tests/test-pathencode.py (2 lines) |
| Commit | Parents | Author | Summary | Date |
|---|---|---|---|---|
| 1f171ca65d04 | 1f02aee8987d | Gregory Szorc | | Mar 1 2022, 11:52 PM |
This revision is part of a stack of related revisions, all authored by indygreg and all closed, including:

| Status | Author | Revision |
|---|---|---|
| Closed | indygreg | D12339 ui: use input() directly |
contrib/synthrepo.py
```diff
         diffopts = diffutil.diffallopts(ui, {'git': True})
         diff = sum(
             (d.splitlines() for d in ctx.diff(pctx, opts=diffopts)), []
         )
         fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
         for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
             if isbin:
                 continue
-            added = sum(pycompat.itervalues(lineadd), 0)
+            added = sum(lineadd.values(), 0)
             if mar == 'm':
                 if added and lineremove:
                     lineschanged[
                         roundto(added, 5), roundto(lineremove, 5)
                     ] += 1
                     filechanges += 1
             elif mar == 'a':
                 fileadds += 1
```
hgext/journal.py
```diff
     for key, it in enumerate(iterables):
         try:
             iterable_map[key] = [next(it), key, it]
         except StopIteration:
             # empty entry, can be ignored
             pass
     while iterable_map:
-        value, key, it = order(pycompat.itervalues(iterable_map))
+        value, key, it = order(iterable_map.values())
         yield value
         try:
             iterable_map[key][0] = next(it)
         except StopIteration:
             # this iterable is empty, remove it from consideration
             del iterable_map[key]
```
hgext/rebase.py
```diff
         rbsrt = rebaseruntime(repo, ui, {})
         rbsrt.restorestatus()
         state = rbsrt.state
     except error.RepoLookupError:
         # i18n: column positioning for "hg summary"
         msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
         ui.write(msg)
         return
-    numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
+    numrebased = len([i for i in state.values() if i >= 0])
     # i18n: column positioning for "hg summary"
     ui.write(
         _(b'rebase: %s, %s (rebase --continue)\n')
         % (
             ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
             ui.label(_(b'%d remaining'), b'rebase.remaining')
             % (len(state) - numrebased),
         )
```
hgext/remotefilelog/connectionpool.py
```diff
 # connectionpool.py - class for pooling peer connections for reuse
 #
 # Copyright 2017 Facebook, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 from mercurial import (
     hg,
-    pycompat,
     sshpeer,
     util,
 )
 _sshv1peer = sshpeer.sshv1peer
 class connectionpool(object):
@@ ... @@
             peer.__class__ = mypeer
         conn = connection(pathpool, peer)
         return conn
     def close(self):
-        for pathpool in pycompat.itervalues(self._pool):
+        for pathpool in self._pool.values():
             for conn in pathpool:
                 conn.close()
             del pathpool[:]
 class connection(object):
     def __init__(self, pool, peer):
         self._pool = pool
```
hgext/remotefilelog/debugcommands.py
```diff
     decompress = opts.get('decompress')
     for root, dirs, files in os.walk(path):
         for file in files:
             if file == b"repos":
                 continue
             filepath = os.path.join(root, file)
             size, firstnode, mapping = parsefileblob(filepath, decompress)
-            for p1, p2, linknode, copyfrom in pycompat.itervalues(mapping):
+            for p1, p2, linknode, copyfrom in mapping.values():
                 if linknode == sha1nodeconstants.nullid:
                     actualpath = os.path.relpath(root, path)
                     key = fileserverclient.getcachekey(
                         b"reponame", actualpath, file
                     )
                     ui.status(
                         b"%s %s\n" % (key, os.path.relpath(filepath, path))
                     )
```
hgext/remotefilelog/repack.py
```diff
         nodes += orphans
         return nodes
     def repackdata(self, ledger, target):
         ui = self.repo.ui
         maxchainlen = ui.configint(b'packs', b'maxchainlen', 1000)
         byfile = {}
-        for entry in pycompat.itervalues(ledger.entries):
+        for entry in ledger.entries.values():
             if entry.datasource:
                 byfile.setdefault(entry.filename, {})[entry.node] = entry
         count = 0
         repackprogress = ui.makeprogress(
             _(b"repacking data"), unit=self.unit, total=len(byfile)
         )
         for filename, entries in sorted(byfile.items()):
@@ ... @@
         repackprogress.complete()
         target.close(ledger=ledger)
     def repackhistory(self, ledger, target):
         ui = self.repo.ui
         byfile = {}
-        for entry in pycompat.itervalues(ledger.entries):
+        for entry in ledger.entries.values():
             if entry.historysource:
                 byfile.setdefault(entry.filename, {})[entry.node] = entry
         progress = ui.makeprogress(
             _(b"repacking history"), unit=self.unit, total=len(byfile)
         )
         for filename, entries in sorted(byfile.items()):
             ancestors = {}
```
hgext/transplant.py
```diff
         list = self.transplants.setdefault(rnode, [])
         list.append(transplantentry(lnode, rnode))
     def write(self):
         if self.dirty and self.transplantfile:
             if not os.path.isdir(self.path):
                 os.mkdir(self.path)
             fp = self.opener(self.transplantfile, b'w')
-            for list in pycompat.itervalues(self.transplants):
+            for list in self.transplants.values():
                 for t in list:
                     l, r = map(hex, (t.lnode, t.rnode))
                     fp.write(l + b':' + r + b'\n')
             fp.close()
         self.dirty = False
     def get(self, rnode):
         return self.transplants.get(rnode) or []
```
mercurial/branchmap.py
```diff
     remote.
     """
     cl = repo.changelog
     clrev = cl.rev
     clbranchinfo = cl.branchinfo
     rbheads = []
     closed = set()
-    for bheads in pycompat.itervalues(remotebranchmap):
+    for bheads in remotebranchmap.values():
         rbheads += bheads
         for h in bheads:
             r = clrev(h)
             b, c = clbranchinfo(r)
             if c:
                 closed.add(h)
     if rbheads:
@@ ... @@
     def iterbranches(self):
         for bn, heads in self.items():
             yield (bn, heads) + self._branchtip(heads)
     def iterheads(self):
         """returns all the heads"""
         self._verifyall()
-        return pycompat.itervalues(self._entries)
+        return self._entries.values()
     def copy(self):
         """return an deep copy of the branchcache object"""
         return type(self)(
             self._repo,
             self._entries,
             self.tipnode,
             self.tiprev,
```
mercurial/exchange.py
```diff
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
     hasphaseheads = b'heads' in b2caps.get(b'phases', ())
     if pushop.remotephases is not None and hasphaseheads:
         # check that the remote phase has not changed
         checks = {p: [] for p in phases.allphases}
         checks[phases.public].extend(pushop.remotephases.publicheads)
         checks[phases.draft].extend(pushop.remotephases.draftroots)
-        if any(pycompat.itervalues(checks)):
+        if any(checks.values()):
             for phase in checks:
                 checks[phase].sort()
             checkdata = phases.binaryencode(checks)
             bundler.newpart(b'check:phases', data=checkdata)
 @b2partsgenerator(b'changeset')
 def _pushb2ctx(pushop, bundler):
```
mercurial/localrepo.py
```diff
         return self._tagscache.tagslist
     def nodetags(self, node):
         '''return the tags associated with a node'''
         if not self._tagscache.nodetagscache:
             nodetagscache = {}
             for t, n in self._tagscache.tags.items():
                 nodetagscache.setdefault(n, []).append(t)
-            for tags in pycompat.itervalues(nodetagscache):
+            for tags in nodetagscache.values():
                 tags.sort()
             self._tagscache.nodetagscache = nodetagscache
         return self._tagscache.nodetagscache.get(node, [])
     def nodebookmarks(self, node):
         """return the list of bookmarks pointing to the specified node"""
         return self._bookmarks.names(node)
```
mercurial/mergestate.py
```diff
 import collections
 import errno
 import shutil
 import struct
 import weakref
 from .i18n import _
 from .node import (
     bin,
     hex,
     nullrev,
 )
 from . import (
     error,
     filemerge,
-    pycompat,
     util,
 )
 from .utils import hashutil
 _pack = struct.pack
 _unpack = struct.unpack
@@ ... @@
         self._results[dfile] = merge_ret, action
         return merge_ret
     def counts(self):
         """return counts for updated, merged and removed files in this
         session"""
         updated, merged, removed = 0, 0, 0
-        for r, action in pycompat.itervalues(self._results):
+        for r, action in self._results.values():
             if r is None:
                 updated += 1
             elif r == 0:
                 if action == ACTION_REMOVE:
                     removed += 1
                 else:
                     merged += 1
         return updated, merged, removed
```
mercurial/patch.py
```diff
             for newhunk in newpatch.hunks:
                 if fixoffset:
                     newhunk.toline += fixoffset
                 applied[newhunk.filename()].append(newhunk)
         else:
             fixoffset += chunk.removed - chunk.added
     return (
         sum(
-            [
-                h
-                for h in pycompat.itervalues(applied)
-                if h[0].special() or len(h) > 1
-            ],
+            [h for h in applied.values() if h[0].special() or len(h) > 1],
             [],
         ),
         {},
     )
 class hunk(object):
     def __init__(self, desc, num, lr, context):
```
mercurial/statprof.py
```diff
                     sitestat = SiteStats(site)
                     stats[site] = sitestat
                 sitestat.addtotal()
                 if i == 0:
                     sitestat.addself()
-        return [s for s in pycompat.itervalues(stats)]
+        return [s for s in stats.values()]
 class DisplayFormats:
     ByLine = 0
     ByMethod = 1
     AboutMethod = 2
     Hotpath = 3
     FlameGraph = 4
@@ ... @@
     for sample in data.samples:
         root.add(sample.stack[::-1], sample.time - lasttime)
         lasttime = sample.time
     showtime = kwargs.get('showtime', True)
     def _write(node, depth, multiple_siblings):
         site = node.site
         visiblechildren = [
-            c
-            for c in pycompat.itervalues(node.children)
-            if c.count >= (limit * root.count)
+            c for c in node.children.values() if c.count >= (limit * root.count)
         ]
         if site:
             indent = depth * 2 - 1
             filename = (site.filename() + b':').ljust(15)
             function = site.function
             # lots of string formatting
             listpattern = (
@@ ... @@
                 prefix,
                 b'line'.rjust(spacing_len),
                 site.lineno,
                 b''.ljust(max(0, 4 - len(str(site.lineno)))),
                 site.getsource(30),
             )
             finalstring = liststring + codestring
-            childrensamples = sum(
-                [c.count for c in pycompat.itervalues(node.children)]
-            )
+            childrensamples = sum([c.count for c in node.children.values()])
             # Make frames that performed more than 10% of the operation red
             if node.count - childrensamples > (0.1 * root.count):
                 finalstring = b'\033[91m' + finalstring + b'\033[0m'
             # Make frames that didn't actually perform work dark grey
             elif node.count - childrensamples == 0:
                 finalstring = b'\033[90m' + finalstring + b'\033[0m'
             fp.write(finalstring + b'\n')
```
mercurial/ui.py
```diff
         msgfmt should be a newline-terminated format string to log, and
         *msgargs are %-formatted into it.
         **opts currently has no defined meanings.
         """
         if not self._loggers:
             return
-        activeloggers = [
-            l for l in pycompat.itervalues(self._loggers) if l.tracked(event)
-        ]
+        activeloggers = [l for l in self._loggers.values() if l.tracked(event)]
         if not activeloggers:
             return
         msg = msgfmt % msgargs
         opts = pycompat.byteskwargs(opts)
         # guard against recursion from e.g. ui.debug()
         registeredloggers = self._loggers
         self._loggers = {}
         try:
```
tests/test-pathencode.py
```diff
         if line[-2:] in ('.i', '.d'):
             line = line[:-2]
         if line.startswith('data/'):
             line = line[5:]
         for c in line:
             counts[c] += 1
     for c in '\r/\n':
         counts.pop(c, None)
-    t = sum(pycompat.itervalues(counts)) / 100.0
+    t = sum(counts.values()) / 100.0
     fp.write('probtable = (')
     for i, (k, v) in enumerate(
         sorted(counts.items(), key=lambda x: x[1], reverse=True)
     ):
         if (i % 5) == 0:
             fp.write('\n ')
         vt = v / t
         if vt < 0.0005:
```