Details
Details
- Reviewers
- None
- Group Reviewers
hg-reviewers - Commits
- rHG9446d5aa0f32: py3: convert strings to bytes in tests/test-remotefilelog-histpack.py
Diff Detail
Diff Detail
- Repository
- rHG Mercurial
- Lint
Lint Skipped - Unit
Unit Tests Skipped
( )
| hg-reviewers |
| Lint Skipped |
| Unit Tests Skipped |
| Path | Packages | |||
|---|---|---|---|---|
| M | tests/test-remotefilelog-histpack.py (19 lines) |
| Commit | Parents | Author | Summary | Date |
|---|---|---|---|---|
| Pulkit Goyal | Nov 26 2018, 7:36 AM |
| def tearDown(self): | def tearDown(self): | ||||
| for d in self.tempdirs: | for d in self.tempdirs: | ||||
| shutil.rmtree(d) | shutil.rmtree(d) | ||||
| def makeTempDir(self): | def makeTempDir(self): | ||||
| tempdir = tempfile.mkdtemp() | tempdir = tempfile.mkdtemp() | ||||
| self.tempdirs.append(tempdir) | self.tempdirs.append(tempdir) | ||||
| return tempdir | return pycompat.fsencode(tempdir) | ||||
| def getHash(self, content): | def getHash(self, content): | ||||
| return hashlib.sha1(content).digest() | return hashlib.sha1(content).digest() | ||||
| def getFakeHash(self): | def getFakeHash(self): | ||||
| return ''.join(chr(random.randint(0, 255)) for _ in range(20)) | return b''.join(pycompat.bytechr(random.randint(0, 255)) | ||||
| for _ in range(20)) | |||||
| def createPack(self, revisions=None): | def createPack(self, revisions=None): | ||||
| """Creates and returns a historypack containing the specified revisions. | """Creates and returns a historypack containing the specified revisions. | ||||
| `revisions` is a list of tuples, where each tuple contains a filename, | `revisions` is a list of tuples, where each tuple contains a filename, | ||||
| node, p1node, p2node, and linknode. | node, p1node, p2node, and linknode. | ||||
| """ | """ | ||||
| if revisions is None: | if revisions is None: | ||||
| revisions = [("filename", self.getFakeHash(), nullid, nullid, | revisions = [("filename", self.getFakeHash(), nullid, nullid, | ||||
| self.getFakeHash(), None)] | self.getFakeHash(), None)] | ||||
| packdir = self.makeTempDir() | packdir = pycompat.fsencode(self.makeTempDir()) | ||||
| packer = historypack.mutablehistorypack(uimod.ui(), packdir, | packer = historypack.mutablehistorypack(uimod.ui(), packdir, | ||||
| version=2) | version=2) | ||||
| for filename, node, p1, p2, linknode, copyfrom in revisions: | for filename, node, p1, p2, linknode, copyfrom in revisions: | ||||
| packer.add(filename, node, p1, p2, linknode, copyfrom) | packer.add(filename, node, p1, p2, linknode, copyfrom) | ||||
| path = packer.close() | path = packer.close() | ||||
| return historypack.historypack(path) | return historypack.historypack(path) | ||||
| self.assertEquals(linknode, actual[2]) | self.assertEquals(linknode, actual[2]) | ||||
| self.assertEquals(copyfrom, actual[3]) | self.assertEquals(copyfrom, actual[3]) | ||||
| def testAddAncestorChain(self): | def testAddAncestorChain(self): | ||||
| """Test putting multiple revisions into a pack and read the ancestor | """Test putting multiple revisions into a pack and read the ancestor | ||||
| chain. | chain. | ||||
| """ | """ | ||||
| revisions = [] | revisions = [] | ||||
| filename = "foo" | filename = b"foo" | ||||
| lastnode = nullid | lastnode = nullid | ||||
| for i in range(10): | for i in range(10): | ||||
| node = self.getFakeHash() | node = self.getFakeHash() | ||||
| revisions.append((filename, node, lastnode, nullid, nullid, None)) | revisions.append((filename, node, lastnode, nullid, nullid, None)) | ||||
| lastnode = node | lastnode = node | ||||
| # revisions must be added in topological order, newest first | # revisions must be added in topological order, newest first | ||||
| revisions = list(reversed(revisions)) | revisions = list(reversed(revisions)) | ||||
| """Pack many related and unrelated ancestors. | """Pack many related and unrelated ancestors. | ||||
| """ | """ | ||||
| # Build a random pack file | # Build a random pack file | ||||
| allentries = {} | allentries = {} | ||||
| ancestorcounts = {} | ancestorcounts = {} | ||||
| revisions = [] | revisions = [] | ||||
| random.seed(0) | random.seed(0) | ||||
| for i in range(100): | for i in range(100): | ||||
| filename = "filename-%s" % i | filename = b"filename-%d" % i | ||||
| entries = [] | entries = [] | ||||
| p2 = nullid | p2 = nullid | ||||
| linknode = nullid | linknode = nullid | ||||
| for j in range(random.randint(1, 100)): | for j in range(random.randint(1, 100)): | ||||
| node = self.getFakeHash() | node = self.getFakeHash() | ||||
| p1 = nullid | p1 = nullid | ||||
| if len(entries) > 0: | if len(entries) > 0: | ||||
| p1 = entries[random.randint(0, len(entries) - 1)] | p1 = entries[random.randint(0, len(entries) - 1)] | ||||
| ep1, ep2, elinknode = allentries[(filename, anode)] | ep1, ep2, elinknode = allentries[(filename, anode)] | ||||
| self.assertEquals(ap1, ep1) | self.assertEquals(ap1, ep1) | ||||
| self.assertEquals(ap2, ep2) | self.assertEquals(ap2, ep2) | ||||
| self.assertEquals(alinknode, elinknode) | self.assertEquals(alinknode, elinknode) | ||||
| self.assertEquals(copyfrom, None) | self.assertEquals(copyfrom, None) | ||||
| def testGetNodeInfo(self): | def testGetNodeInfo(self): | ||||
| revisions = [] | revisions = [] | ||||
| filename = "foo" | filename = b"foo" | ||||
| lastnode = nullid | lastnode = nullid | ||||
| for i in range(10): | for i in range(10): | ||||
| node = self.getFakeHash() | node = self.getFakeHash() | ||||
| revisions.append((filename, node, lastnode, nullid, nullid, None)) | revisions.append((filename, node, lastnode, nullid, nullid, None)) | ||||
| lastnode = node | lastnode = node | ||||
| pack = self.createPack(revisions) | pack = self.createPack(revisions) | ||||
| # Test that getnodeinfo returns the expected results | # Test that getnodeinfo returns the expected results | ||||
| for filename, node, p1, p2, linknode, copyfrom in revisions: | for filename, node, p1, p2, linknode, copyfrom in revisions: | ||||
| ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node) | ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node) | ||||
| self.assertEquals(ap1, p1) | self.assertEquals(ap1, p1) | ||||
| self.assertEquals(ap2, p2) | self.assertEquals(ap2, p2) | ||||
| self.assertEquals(alinknode, linknode) | self.assertEquals(alinknode, linknode) | ||||
| self.assertEquals(acopyfrom, copyfrom) | self.assertEquals(acopyfrom, copyfrom) | ||||
| def testGetMissing(self): | def testGetMissing(self): | ||||
| """Test the getmissing() api. | """Test the getmissing() api. | ||||
| """ | """ | ||||
| revisions = [] | revisions = [] | ||||
| filename = "foo" | filename = b"foo" | ||||
| for i in range(10): | for i in range(10): | ||||
| node = self.getFakeHash() | node = self.getFakeHash() | ||||
| p1 = self.getFakeHash() | p1 = self.getFakeHash() | ||||
| p2 = self.getFakeHash() | p2 = self.getFakeHash() | ||||
| linknode = self.getFakeHash() | linknode = self.getFakeHash() | ||||
| revisions.append((filename, node, p1, p2, linknode, None)) | revisions.append((filename, node, p1, p2, linknode, None)) | ||||
| pack = self.createPack(revisions) | pack = self.createPack(revisions) | ||||
| # Test getmissing on a non-existent filename | # Test getmissing on a non-existent filename | ||||
| missing = pack.getmissing([("bar", fakenode)]) | missing = pack.getmissing([("bar", fakenode)]) | ||||
| self.assertEquals(missing, [("bar", fakenode)]) | self.assertEquals(missing, [("bar", fakenode)]) | ||||
| def testAddThrows(self): | def testAddThrows(self): | ||||
| pack = self.createPack() | pack = self.createPack() | ||||
| try: | try: | ||||
| pack.add('filename', nullid, nullid, nullid, nullid, None) | pack.add(b'filename', nullid, nullid, nullid, nullid, None) | ||||
| self.assertTrue(False, "historypack.add should throw") | self.assertTrue(False, "historypack.add should throw") | ||||
| except RuntimeError: | except RuntimeError: | ||||
| pass | pass | ||||
| def testBadVersionThrows(self): | def testBadVersionThrows(self): | ||||
| pack = self.createPack() | pack = self.createPack() | ||||
| path = pack.path + '.histpack' | path = pack.path + '.histpack' | ||||
| with open(path) as f: | with open(path) as f: | ||||
| pass | pass | ||||
| def testLargePack(self): | def testLargePack(self): | ||||
| """Test creating and reading from a large pack with over X entries. | """Test creating and reading from a large pack with over X entries. | ||||
| This causes it to use a 2^16 fanout table instead.""" | This causes it to use a 2^16 fanout table instead.""" | ||||
| total = basepack.SMALLFANOUTCUTOFF + 1 | total = basepack.SMALLFANOUTCUTOFF + 1 | ||||
| revisions = [] | revisions = [] | ||||
| for i in xrange(total): | for i in xrange(total): | ||||
| filename = "foo-%s" % i | filename = b"foo-%d" % i | ||||
| node = self.getFakeHash() | node = self.getFakeHash() | ||||
| p1 = self.getFakeHash() | p1 = self.getFakeHash() | ||||
| p2 = self.getFakeHash() | p2 = self.getFakeHash() | ||||
| linknode = self.getFakeHash() | linknode = self.getFakeHash() | ||||
| revisions.append((filename, node, p1, p2, linknode, None)) | revisions.append((filename, node, p1, p2, linknode, None)) | ||||
| pack = self.createPack(revisions) | pack = self.createPack(revisions) | ||||
| self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX) | self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX) | ||||