The upstream has made the bundle parts non-seekable to reduce memory usage.
So we need to copy a bundle part if we need to process it later.
Also, fix an apparent issue where part should be scratchbookpart.
durham | |
singhsrb |
Restricted Project |
The upstream has made the bundle parts non-seekable to reduce memory usage.
So we need to copy a bundle part if we need to process it later.
Also, fix an apparent issue where part should be scratchbookpart.
Automatic diff as part of commit; lint not applicable. |
Automatic diff as part of commit; unit tests not applicable. |
# Copyright 2017 Facebook, Inc. | # Copyright 2017 Facebook, Inc. | ||||
# | # | ||||
# This software may be used and distributed according to the terms of the | # This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | # GNU General Public License version 2 or any later version. | ||||
from .common import ( | from .common import ( | ||||
encodebookmarks, | encodebookmarks, | ||||
isremotebooksenabled, | isremotebooksenabled, | ||||
) | ) | ||||
from mercurial import ( | from mercurial import ( | ||||
bundle2, | bundle2, | ||||
changegroup, | changegroup, | ||||
error, | error, | ||||
extensions, | extensions, | ||||
revsetlang, | revsetlang, | ||||
util, | |||||
) | ) | ||||
from mercurial.i18n import _ | from mercurial.i18n import _ | ||||
scratchbranchparttype = 'b2x:infinitepush' | scratchbranchparttype = 'b2x:infinitepush' | ||||
scratchbookmarksparttype = 'b2x:infinitepushscratchbookmarks' | scratchbookmarksparttype = 'b2x:infinitepushscratchbookmarks' | ||||
def getscratchbranchparts(repo, peer, outgoing, confignonforwardmove, | def getscratchbranchparts(repo, peer, outgoing, confignonforwardmove, | ||||
ui, bookmark, create): | ui, bookmark, create): | ||||
to make sure large files are uploaded to lfs | to make sure large files are uploaded to lfs | ||||
''' | ''' | ||||
try: | try: | ||||
lfsmod = extensions.find('lfs') | lfsmod = extensions.find('lfs') | ||||
lfsmod.wrapper.uploadblobsfromrevs(repo, missing) | lfsmod.wrapper.uploadblobsfromrevs(repo, missing) | ||||
except KeyError: | except KeyError: | ||||
# Ignore if lfs extension is not enabled | # Ignore if lfs extension is not enabled | ||||
return | return | ||||
class copiedpart(object):
    """an in-memory snapshot of an unbundlepart that can be consumed later

    Upstream made bundle parts non-seekable to reduce memory usage, so a
    part that must be processed after the stream has advanced is drained
    into a private buffer up front.
    """
    def __init__(self, part):
        # mirror the part's "public properties" verbatim
        for attr in ('type', 'id', 'mandatory', 'mandatoryparams',
                     'advisoryparams', 'params', 'mandatorykeys'):
            setattr(self, attr, getattr(part, attr))
        # drain the (non-seekable) part into a seekable buffer now
        self._io = util.stringio(part.read())
    def consume(self):
        # payload is already fully buffered; nothing left to drain
        return
    def read(self, size=None):
        # delegate to the buffer; no size means read everything remaining
        return self._io.read() if size is None else self._io.read(size)
I wonder if we could use your copy function here too. This code (below) copies the parameters individually, which might be safer than reusing the dictionaries.
No need to do so now. Just an observation about a common copy pattern.