The option does not seem to do what we want since it does not wait for the background
process to actually start (e.g. taking the lock). And the race has been dealt
with in another way anyway.
This effectively backs out changeset 2c74337e6483.
durin42 |
hg-reviewers |
The option does not seem to do what we want since it does not wait for the background
process to actually start (e.g. taking the lock). And the race has been dealt
with in another way anyway.
This effectively backs out changeset 2c74337e6483.
Automatic diff as part of commit; lint not applicable. |
Automatic diff as part of commit; unit tests not applicable. |
This doesn't apply to stable. I also checked default, and it doesn't apply there either.
This is wrong, because now remotefilelog will do ensurestart=True (the default) which is a significant performance hit for remotefilelog users.
Path | Packages | |||
---|---|---|---|---|
M | hgext/remotefilelog/__init__.py (22 lines) | |||
M | hgext/remotefilelog/repack.py (8 lines) | |||
M | hgext/remotefilelog/shallowrepo.py (10 lines) | |||
M | tests/test-remotefilelog-bgprefetch.t (3 lines) | |||
M | tests/test-remotefilelog-repack-fast.t (4 lines) | |||
M | tests/test-remotefilelog-repack.t (3 lines) |
configitem(b'remotefilelog', b'gcrepack', default=False) | configitem(b'remotefilelog', b'gcrepack', default=False) | ||||
configitem(b'remotefilelog', b'repackonhggc', default=False) | configitem(b'remotefilelog', b'repackonhggc', default=False) | ||||
configitem(b'repack', b'chainorphansbysize', default=True, experimental=True) | configitem(b'repack', b'chainorphansbysize', default=True, experimental=True) | ||||
configitem(b'packs', b'maxpacksize', default=0) | configitem(b'packs', b'maxpacksize', default=0) | ||||
configitem(b'packs', b'maxchainlen', default=1000) | configitem(b'packs', b'maxchainlen', default=1000) | ||||
configitem(b'devel', b'remotefilelog.ensurestart', default=False) | |||||
configitem(b'devel', b'remotefilelog.bg-wait', default=False) | configitem(b'devel', b'remotefilelog.bg-wait', default=False) | ||||
# default TTL limit is 30 days | # default TTL limit is 30 days | ||||
_defaultlimit = 60 * 60 * 24 * 30 | _defaultlimit = 60 * 60 * 24 * 30 | ||||
configitem(b'remotefilelog', b'nodettl', default=_defaultlimit) | configitem(b'remotefilelog', b'nodettl', default=_defaultlimit) | ||||
configitem(b'remotefilelog', b'data.gencountlimit', default=2), | configitem(b'remotefilelog', b'data.gencountlimit', default=2), | ||||
configitem( | configitem( | ||||
def pull(orig, ui, repo, *pats, **opts): | def pull(orig, ui, repo, *pats, **opts): | ||||
result = orig(ui, repo, *pats, **opts) | result = orig(ui, repo, *pats, **opts) | ||||
if isenabled(repo): | if isenabled(repo): | ||||
# prefetch if it's configured | # prefetch if it's configured | ||||
prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch') | prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch') | ||||
bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack') | bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack') | ||||
bgprefetch = repo.ui.configbool(b'remotefilelog', b'backgroundprefetch') | bgprefetch = repo.ui.configbool(b'remotefilelog', b'backgroundprefetch') | ||||
ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart') | |||||
if prefetchrevset: | if prefetchrevset: | ||||
ui.status(_(b"prefetching file contents\n")) | ui.status(_(b"prefetching file contents\n")) | ||||
revs = scmutil.revrange(repo, [prefetchrevset]) | revs = scmutil.revrange(repo, [prefetchrevset]) | ||||
base = repo[b'.'].rev() | base = repo[b'.'].rev() | ||||
if bgprefetch: | if bgprefetch: | ||||
repo.backgroundprefetch( | repo.backgroundprefetch(prefetchrevset, repack=bgrepack) | ||||
prefetchrevset, repack=bgrepack, ensurestart=ensurestart | |||||
) | |||||
else: | else: | ||||
repo.prefetch(revs, base=base) | repo.prefetch(revs, base=base) | ||||
if bgrepack: | if bgrepack: | ||||
repackmod.backgroundrepack( | repackmod.backgroundrepack(repo, incremental=True) | ||||
repo, incremental=True, ensurestart=ensurestart | |||||
) | |||||
elif bgrepack: | elif bgrepack: | ||||
repackmod.backgroundrepack( | repackmod.backgroundrepack(repo, incremental=True) | ||||
repo, incremental=True, ensurestart=ensurestart | |||||
) | |||||
return result | return result | ||||
def exchangepull(orig, repo, remote, *args, **kwargs): | def exchangepull(orig, repo, remote, *args, **kwargs): | ||||
# Hook into the callstream/getbundle to insert bundle capabilities | # Hook into the callstream/getbundle to insert bundle capabilities | ||||
# during a pull. | # during a pull. | ||||
def localgetbundle( | def localgetbundle( | ||||
opts = pycompat.byteskwargs(opts) | opts = pycompat.byteskwargs(opts) | ||||
if not isenabled(repo): | if not isenabled(repo): | ||||
raise error.Abort(_(b"repo is not shallow")) | raise error.Abort(_(b"repo is not shallow")) | ||||
opts = resolveprefetchopts(ui, opts) | opts = resolveprefetchopts(ui, opts) | ||||
revs = scmutil.revrange(repo, opts.get(b'rev')) | revs = scmutil.revrange(repo, opts.get(b'rev')) | ||||
repo.prefetch(revs, opts.get(b'base'), pats, opts) | repo.prefetch(revs, opts.get(b'base'), pats, opts) | ||||
ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart') | |||||
# Run repack in background | # Run repack in background | ||||
if opts.get(b'repack'): | if opts.get(b'repack'): | ||||
repackmod.backgroundrepack( | repackmod.backgroundrepack(repo, incremental=True) | ||||
repo, incremental=True, ensurestart=ensurestart | |||||
) | |||||
@command( | @command( | ||||
b'repack', | b'repack', | ||||
[ | [ | ||||
(b'', b'background', None, _(b'run in a background process'), None), | (b'', b'background', None, _(b'run in a background process'), None), | ||||
(b'', b'incremental', None, _(b'do an incremental repack'), None), | (b'', b'incremental', None, _(b'do an incremental repack'), None), | ||||
( | ( | ||||
b'', | b'', | ||||
b'packsonly', | b'packsonly', | ||||
None, | None, | ||||
_(b'only repack packs (skip loose objects)'), | _(b'only repack packs (skip loose objects)'), | ||||
None, | None, | ||||
), | ), | ||||
], | ], | ||||
_(b'hg repack [OPTIONS]'), | _(b'hg repack [OPTIONS]'), | ||||
) | ) | ||||
def repack_(ui, repo, *pats, **opts): | def repack_(ui, repo, *pats, **opts): | ||||
if opts.get(r'background'): | if opts.get(r'background'): | ||||
ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart') | |||||
repackmod.backgroundrepack( | repackmod.backgroundrepack( | ||||
repo, | repo, | ||||
incremental=opts.get(r'incremental'), | incremental=opts.get(r'incremental'), | ||||
packsonly=opts.get(r'packsonly', False), | packsonly=opts.get(r'packsonly', False), | ||||
ensurestart=ensurestart, | |||||
) | ) | ||||
return | return | ||||
options = {b'packsonly': opts.get(r'packsonly')} | options = {b'packsonly': opts.get(r'packsonly')} | ||||
try: | try: | ||||
if opts.get(r'incremental'): | if opts.get(r'incremental'): | ||||
repackmod.incrementalrepack(repo, options=options) | repackmod.incrementalrepack(repo, options=options) | ||||
else: | else: | ||||
repackmod.fullrepack(repo, options=options) | repackmod.fullrepack(repo, options=options) | ||||
except repackmod.RepackAlreadyRunning as ex: | except repackmod.RepackAlreadyRunning as ex: | ||||
# Don't propogate the exception if the repack is already in | # Don't propogate the exception if the repack is already in | ||||
# progress, since we want the command to exit 0. | # progress, since we want the command to exit 0. | ||||
repo.ui.warn(b'%s\n' % ex) | repo.ui.warn(b'%s\n' % ex) |
osutil = policy.importmod(r'osutil') | osutil = policy.importmod(r'osutil') | ||||
class RepackAlreadyRunning(error.Abort): | class RepackAlreadyRunning(error.Abort): | ||||
pass | pass | ||||
def backgroundrepack( | def backgroundrepack(repo, incremental=True, packsonly=False): | ||||
repo, incremental=True, packsonly=False, ensurestart=False | |||||
): | |||||
cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'repack'] | cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'repack'] | ||||
msg = _(b"(running background repack)\n") | msg = _(b"(running background repack)\n") | ||||
if incremental: | if incremental: | ||||
cmd.append(b'--incremental') | cmd.append(b'--incremental') | ||||
msg = _(b"(running background incremental repack)\n") | msg = _(b"(running background incremental repack)\n") | ||||
if packsonly: | if packsonly: | ||||
cmd.append(b'--packsonly') | cmd.append(b'--packsonly') | ||||
repo.ui.warn(msg) | repo.ui.warn(msg) | ||||
# We know this command will find a binary, so don't block on it starting. | # We know this command will find a binary, so don't block on it starting. | ||||
kwargs = {} | kwargs = {} | ||||
if repo.ui.configbool(b'devel', b'remotefilelog.bg-wait'): | if repo.ui.configbool(b'devel', b'remotefilelog.bg-wait'): | ||||
kwargs['record_wait'] = repo.ui.atexit | kwargs['record_wait'] = repo.ui.atexit | ||||
procutil.runbgcommand( | procutil.runbgcommand(cmd, encoding.environ, ensurestart=False, **kwargs) | ||||
cmd, encoding.environ, ensurestart=ensurestart, **kwargs | |||||
) | |||||
def fullrepack(repo, options=None): | def fullrepack(repo, options=None): | ||||
"""If ``packsonly`` is True, stores creating only loose objects are skipped. | """If ``packsonly`` is True, stores creating only loose objects are skipped. | ||||
""" | """ | ||||
if util.safehasattr(repo, 'shareddatastores'): | if util.safehasattr(repo, 'shareddatastores'): | ||||
datasource = contentstore.unioncontentstore(*repo.shareddatastores) | datasource = contentstore.unioncontentstore(*repo.shareddatastores) | ||||
historysource = metadatastore.unionmetadatastore( | historysource = metadatastore.unionmetadatastore( |
if fparent1 != nullid: | if fparent1 != nullid: | ||||
files.append((f, hex(fparent1))) | files.append((f, hex(fparent1))) | ||||
self.fileservice.prefetch(files) | self.fileservice.prefetch(files) | ||||
return super(shallowrepository, self).commitctx( | return super(shallowrepository, self).commitctx( | ||||
ctx, error=error, origctx=origctx | ctx, error=error, origctx=origctx | ||||
) | ) | ||||
def backgroundprefetch( | def backgroundprefetch( | ||||
self, | self, revs, base=None, repack=False, pats=None, opts=None | ||||
revs, | |||||
base=None, | |||||
repack=False, | |||||
pats=None, | |||||
opts=None, | |||||
ensurestart=False, | |||||
): | ): | ||||
"""Runs prefetch in background with optional repack | """Runs prefetch in background with optional repack | ||||
""" | """ | ||||
cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch'] | cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch'] | ||||
if repack: | if repack: | ||||
cmd.append(b'--repack') | cmd.append(b'--repack') | ||||
if revs: | if revs: | ||||
cmd += [b'-r', revs] | cmd += [b'-r', revs] | ||||
# We know this command will find a binary, so don't block | # We know this command will find a binary, so don't block | ||||
# on it starting. | # on it starting. | ||||
kwargs = {} | kwargs = {} | ||||
if repo.ui.configbool(b'devel', b'remotefilelog.bg-wait'): | if repo.ui.configbool(b'devel', b'remotefilelog.bg-wait'): | ||||
kwargs['record_wait'] = repo.ui.atexit | kwargs['record_wait'] = repo.ui.atexit | ||||
procutil.runbgcommand( | procutil.runbgcommand( | ||||
cmd, encoding.environ, ensurestart=ensurestart, **kwargs | cmd, encoding.environ, ensurestart=False, **kwargs | ||||
) | ) | ||||
def prefetch(self, revs, base=None, pats=None, opts=None): | def prefetch(self, revs, base=None, pats=None, opts=None): | ||||
"""Prefetches all the necessary file revisions for the given revs | """Prefetches all the necessary file revisions for the given revs | ||||
Optionally runs repack in background | Optionally runs repack in background | ||||
""" | """ | ||||
with repo._lock( | with repo._lock( | ||||
repo.svfs, | repo.svfs, |
#require no-windows | #require no-windows | ||||
$ . "$TESTDIR/remotefilelog-library.sh" | $ . "$TESTDIR/remotefilelog-library.sh" | ||||
# devel.remotefilelog.ensurestart: reduce race condition with | |||||
# waiton{repack/prefetch} | |||||
$ cat >> $HGRCPATH <<EOF | $ cat >> $HGRCPATH <<EOF | ||||
> [devel] | > [devel] | ||||
> remotefilelog.ensurestart=True | |||||
> remotefilelog.bg-wait=True | > remotefilelog.bg-wait=True | ||||
> EOF | > EOF | ||||
$ hg init master | $ hg init master | ||||
$ cd master | $ cd master | ||||
$ cat >> .hg/hgrc <<EOF | $ cat >> .hg/hgrc <<EOF | ||||
> [remotefilelog] | > [remotefilelog] | ||||
> server=True | > server=True |
#require no-windows | #require no-windows | ||||
$ . "$TESTDIR/remotefilelog-library.sh" | $ . "$TESTDIR/remotefilelog-library.sh" | ||||
# devel.remotefilelog.ensurestart: reduce race condition with | |||||
# waiton{repack/prefetch} | |||||
$ cat >> $HGRCPATH <<EOF | $ cat >> $HGRCPATH <<EOF | ||||
> [remotefilelog] | > [remotefilelog] | ||||
> fastdatapack=True | > fastdatapack=True | ||||
> [devel] | > [devel] | ||||
> remotefilelog.ensurestart=True | |||||
> remotefilelog.bg-wait=True | > remotefilelog.bg-wait=True | ||||
> EOF | > EOF | ||||
$ hg init master | $ hg init master | ||||
$ cd master | $ cd master | ||||
$ cat >> .hg/hgrc <<EOF | $ cat >> .hg/hgrc <<EOF | ||||
> [remotefilelog] | > [remotefilelog] | ||||
> server=True | > server=True |
#require no-windows | #require no-windows | ||||
$ . "$TESTDIR/remotefilelog-library.sh" | $ . "$TESTDIR/remotefilelog-library.sh" | ||||
# devel.remotefilelog.ensurestart: reduce race condition with | |||||
# waiton{repack/prefetch} | |||||
$ cat >> $HGRCPATH <<EOF | $ cat >> $HGRCPATH <<EOF | ||||
> [devel] | > [devel] | ||||
> remotefilelog.ensurestart=True | |||||
> remotefilelog.bg-wait=True | > remotefilelog.bg-wait=True | ||||
> EOF | > EOF | ||||
$ hg init master | $ hg init master | ||||
$ cd master | $ cd master | ||||
$ cat >> .hg/hgrc <<EOF | $ cat >> .hg/hgrc <<EOF | ||||
> [remotefilelog] | > [remotefilelog] | ||||
> server=True | > server=True |