diff --git a/tests/remotefilelog-datapack.py b/tests/remotefilelog-datapack.py
--- a/tests/remotefilelog-datapack.py
+++ b/tests/remotefilelog-datapack.py
@@ -53,11 +53,14 @@
     def getFakeHash(self):
         return ''.join(chr(random.randint(0, 255)) for _ in range(20))
 
-    def createPack(self, revisions=None, version=0):
+    def createPackInTempDir(self, revisions=None, version=0):
+        packdir = self.makeTempDir()
+        return self.createPack(packdir, revisions, version)
+
+    def createPack(self, packdir, revisions=None, version=0):
         if revisions is None:
             revisions = [("filename", self.getFakeHash(), nullid, "content")]
 
-        packdir = self.makeTempDir()
         packer = mutabledatapack(mercurial.ui.ui(), packdir, version=version)
 
         for args in revisions:
@@ -78,7 +81,7 @@
         node = self.getHash(content)
 
         revisions = [(filename, node, nullid, content)]
-        pack = self.createPack(revisions)
+        pack = self.createPackInTempDir(revisions)
 
         if self.paramsavailable:
             self.assertEquals(pack.params.fanoutprefix, SMALLFANOUTPREFIX)
@@ -102,7 +105,7 @@
             node = self.getHash(content)
             revisions.append((filename, node, nullid, content))
 
-        pack = self.createPack(revisions)
+        pack = self.createPackInTempDir(revisions)
 
         for filename, node, base, content in revisions:
             chain = pack.getdeltachain(filename, node)
@@ -120,7 +123,7 @@
             revisions.append((filename, node, lastnode, content))
             lastnode = node
 
-        pack = self.createPack(revisions)
+        pack = self.createPackInTempDir(revisions)
         # Test that the chain for the final entry has all the others
         chain = pack.getdeltachain(filename, node)
         for i in range(10):
@@ -147,7 +150,7 @@
             blobs[(filename, node, lastnode)] = content
             revisions.append((filename, node, lastnode, content))
 
-        pack = self.createPack(revisions)
+        pack = self.createPackInTempDir(revisions)
 
         # Verify the pack contents
         for (filename, node, lastnode), content in sorted(blobs.iteritems()):
@@ -167,7 +170,7 @@
                    'Z': 'random_string',
                    '_': '\0' * i}
             revisions.append((filename, node, nullid, content, meta))
-        pack = self.createPack(revisions, version=1)
+        pack = self.createPackInTempDir(revisions, version=1)
         for name, node, x, content, origmeta in revisions:
             parsedmeta = pack.getmeta(name, node)
             # flag == 0 should be optimized out
@@ -182,7 +185,7 @@
         meta = {constants.METAKEYFLAG: 3}
         revisions = [(filename, node, nullid, content, meta)]
         try:
-            self.createPack(revisions, version=0)
+            self.createPackInTempDir(revisions, version=0)
             self.assertTrue(False, "should throw if metadata is not supported")
         except RuntimeError:
             pass
@@ -199,7 +202,7 @@
             revisions.append((filename, node, lastnode, content))
             lastnode = node
 
-        pack = self.createPack(revisions)
+        pack = self.createPackInTempDir(revisions)
 
         missing = pack.getmissing([("foo", revisions[0][1])])
         self.assertFalse(missing)
@@ -213,7 +216,7 @@
         self.assertEquals(missing, [("foo", fakenode)])
 
     def testAddThrows(self):
-        pack = self.createPack()
+        pack = self.createPackInTempDir()
 
         try:
             pack.add('filename', nullid, 'contents')
@@ -222,7 +225,7 @@
             pass
 
     def testBadVersionThrows(self):
-        pack = self.createPack()
+        pack = self.createPackInTempDir()
         path = pack.path + '.datapack'
         with open(path) as f:
             raw = f.read()
@@ -240,7 +243,7 @@
     def testMissingDeltabase(self):
        fakenode = self.getFakeHash()
         revisions = [("filename", fakenode, self.getFakeHash(), "content")]
-        pack = self.createPack(revisions)
+        pack = self.createPackInTempDir(revisions)
         chain = pack.getdeltachain("filename", fakenode)
 
         self.assertEquals(len(chain), 1)
@@ -257,7 +260,7 @@
             blobs[(filename, node)] = content
             revisions.append((filename, node, nullid, content))
 
-        pack = self.createPack(revisions)
+        pack = self.createPackInTempDir(revisions)
 
         if self.paramsavailable:
             self.assertEquals(pack.params.fanoutprefix, LARGEFANOUTPREFIX)
@@ -293,7 +296,7 @@
                 node = self.getHash(content)
                 revisions.append((filename, node, nullid, content))
 
-            path = self.createPack(revisions).path
+            path = self.createPackInTempDir(revisions).path
 
             # Perf of large multi-get
             import gc
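
For readers skimming the patch: the refactoring above splits pack creation into two helpers. `createPack` now takes an explicit `packdir`, and `createPackInTempDir` keeps the old make-a-fresh-temp-dir behavior for callers that do not care where the pack lands. The minimal sketch below illustrates that split in isolation; `FakePackBuilder` and its return value are hypothetical stand-ins, not part of the remotefilelog test suite.

    import tempfile


    class FakePackBuilder(object):
        """Hypothetical stand-in for the test helper touched by the diff above.

        It only demonstrates the createPack/createPackInTempDir split; it does
        not write real datapack files.
        """

        def makeTempDir(self):
            # The real test class tracks these directories for cleanup; this
            # sketch leaves cleanup to the caller.
            return tempfile.mkdtemp()

        def createPackInTempDir(self, revisions=None):
            # Old behavior: every pack gets its own fresh temporary directory.
            packdir = self.makeTempDir()
            return self.createPack(packdir, revisions)

        def createPack(self, packdir, revisions=None):
            # New behavior: the caller picks the directory, so a test can
            # write several packs into the same place when it needs to.
            if revisions is None:
                revisions = [("filename", "fakehash", None, "content")]
            return (packdir, list(revisions))


    if __name__ == '__main__':
        builder = FakePackBuilder()
        print(builder.createPackInTempDir())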