diff --git a/cstore/py-cdatapack.h b/cstore/py-cdatapack.h
--- a/cstore/py-cdatapack.h
+++ b/cstore/py-cdatapack.h
@@ -450,6 +450,88 @@
 }
 
 /**
+ * Finds a node and returns its delta entry (delta, deltabasenode,
+ * meta) tuple if found.
+ */
+static PyObject *cdatapack_getdelta(
+    py_cdatapack *self,
+    PyObject *args) {
+  const char *node;
+  Py_ssize_t node_sz;
+
+  // 1. Parse the args
+  if (!PyArg_ParseTuple(args, "s#", &node, &node_sz)) {
+    return NULL;
+  }
+
+  if (node_sz != NODE_SZ) {
+    PyErr_Format(PyExc_ValueError, "node must be %d bytes long", NODE_SZ);
+    return NULL;
+  }
+
+  // 2. Read the delta chain
+  pack_index_entry_t index_entry;
+
+  if (find(self->handle, (const uint8_t *) node, &index_entry) == false) {
+    PyErr_SetObject(PyExc_KeyError, PyTuple_GET_ITEM(args, 0));
+    return NULL;
+  }
+
+  delta_chain_link_t link;
+
+  get_delta_chain_link_result_t next = getdeltachainlink(
+      self->handle,
+      ((uint8_t *) self->handle->data_mmap) + index_entry.data_offset,
+      &link);
+
+  if (next.code != GET_DELTA_CHAIN_LINK_OK) {
+    PyErr_SetObject(PyExc_KeyError, PyTuple_GET_ITEM(args, 0));
+    return NULL;
+  }
+
+  // Populate the link.delta pointer
+  if (!uncompressdeltachainlink(&link)) {
+    PyErr_Format(PyExc_ValueError, "unable to decompress pack entry");
+    return NULL;
+  }
+
+  // 3. Convert it into python objects
+  PyObject *tuple = NULL;
+  PyObject *delta = NULL, *deltabasenode = NULL, *meta =
+      NULL;
+
+  delta = PyBytes_FromStringAndSize(
+      (const char *) link.delta, (Py_ssize_t) link.delta_sz);
+  deltabasenode = PyBytes_FromStringAndSize(
+      (const char *) link.deltabase_node, NODE_SZ);
+  meta = readpymeta(&link);
+
+  if (deltabasenode != NULL &&
+      delta != NULL &&
+      meta != NULL) {
+    tuple = PyTuple_Pack(3, delta, deltabasenode, meta);
+  }
+
+  Py_XDECREF(delta);
+  Py_XDECREF(deltabasenode);
+  Py_XDECREF(meta);
+
+  if (tuple == NULL) {
+    goto err_cleanup;
+  }
+
+  goto cleanup;
+
+err_cleanup:
+  Py_XDECREF(tuple);
+  tuple = NULL;
+
+cleanup:
+  free((void *)link.delta);
+  return tuple;
+}
+
+/**
  * Finds a node and returns a list of (filename, node, filename, delta base
  * node, delta) tuples if found.
  */
@@ -575,6 +657,10 @@
    METH_VARARGS,
    "Finds a node and returns a (node, deltabase index offset, "
        "data offset, data size) tuple if found."},
+  {"getdelta", (PyCFunction)cdatapack_getdelta,
+   METH_VARARGS,
+   "Finds a node and returns its delta entry (delta, deltabasenode, "
+       "meta) tuple if found."},
   {"getdeltachain", (PyCFunction)cdatapack_getdeltachain,
    METH_VARARGS,
    "Finds a node and returns a list of (filename, node, filename, delta "
diff --git a/remotefilelog/contentstore.py b/remotefilelog/contentstore.py
--- a/remotefilelog/contentstore.py
+++ b/remotefilelog/contentstore.py
@@ -62,6 +62,17 @@
 
         return text
 
+    def getdelta(self, name, node):
+        """Return the single delta entry for the given name/node pair as a
+        (delta, deltabasename, deltabasenode, meta) tuple."""
+        for store in self.stores:
+            try:
+                return store.getdelta(name, node)
+            except KeyError:
+                pass
+
+        raise KeyError((name, hex(node)))
+
     def getdeltachain(self, name, node):
         """Returns the deltachain for the given name/node pair.
 
@@ -165,6 +176,12 @@
         revision = shallowutil.createrevlogtext(content, copyfrom, copyrev)
         return revision
 
+    def getdelta(self, name, node):
+        # Since remotefilelog content stores only contain full texts, just
+        # return the full text itself, with nullid as the delta base.
+        revision = self.get(name, node)
+        return revision, name, nullid, self.getmeta(name, node)
+
     def getdeltachain(self, name, node):
         # Since remotefilelog content stores just contain full texts, we return
         # a fake delta chain that just consists of a single full text revision.
@@ -209,6 +226,10 @@
                                   fetchdata=True)
         return self._shared.get(name, node)
 
+    def getdelta(self, name, node):
+        revision = self.get(name, node)
+        return revision, name, nullid, self._shared.getmeta(name, node)
+
     def getdeltachain(self, name, node):
         # Since our remote content stores just contain full texts, we return a
         # fake delta chain that just consists of a single full text revision.
@@ -242,6 +263,10 @@
     def get(self, name, node):
         return self._revlog(name).revision(node, raw=True)
 
+    def getdelta(self, name, node):
+        revision = self.get(name, node)
+        return revision, name, nullid, self.getmeta(name, node)
+
     def getdeltachain(self, name, node):
         revision = self.get(name, node)
         return [(name, node, None, nullid, revision)]
diff --git a/remotefilelog/datapack.py b/remotefilelog/datapack.py
--- a/remotefilelog/datapack.py
+++ b/remotefilelog/datapack.py
@@ -56,6 +56,21 @@
 
         raise KeyError((name, hex(node)))
 
+    def getdelta(self, name, node):
+        for pack in self.packs:
+            try:
+                return pack.getdelta(name, node)
+            except KeyError:
+                pass
+
+        for pack in self.refresh():
+            try:
+                return pack.getdelta(name, node)
+            except KeyError:
+                pass
+
+        raise KeyError((name, hex(node)))
+
     def getdeltachain(self, name, node):
         for pack in self.packs:
             try:
@@ -123,6 +138,20 @@
 
         return meta
 
+    def getdelta(self, name, node):
+        value = self._find(node)
+        if value is None:
+            raise KeyError((name, hex(node)))
+
+        node, deltabaseoffset, offset, size = value
+        entry = self._readentry(offset, size, getmeta=True)
+        filename, node, deltabasenode, delta, meta = entry
+
+        # If we've read a lot of data from the mmap, free some memory.
+        self.freememory()
+
+        return delta, filename, deltabasenode, meta
+
     def getdeltachain(self, name, node):
         value = self._find(node)
         if value is None:
@@ -328,6 +357,14 @@
     def getmeta(self, name, node):
         return self.datapack.getmeta(node)
 
+    def getdelta(self, name, node):
+        result = self.datapack.getdelta(node)
+        if result is None:
+            raise KeyError((name, hex(node)))
+
+        delta, deltabasenode, meta = result
+        return delta, name, deltabasenode, meta
+
     def getdeltachain(self, name, node):
         result = self.datapack.getdeltachain(node)
         if result is None:
diff --git a/tests/test-remotefilelog-datapack.py b/tests/test-remotefilelog-datapack.py
--- a/tests/test-remotefilelog-datapack.py
+++ b/tests/test-remotefilelog-datapack.py
@@ -108,11 +108,14 @@
             filename = "foo%s" % i
             content = "abcdef%s" % i
             node = self.getHash(content)
-            revisions.append((filename, node, nullid, content))
+            revisions.append((filename, node, self.getFakeHash(), content))
 
         pack = self.createPack(revisions)
 
         for filename, node, base, content in revisions:
+            entry = pack.getdelta(filename, node)
+            self.assertEquals((content, filename, base, {}), entry)
+
             chain = pack.getdeltachain(filename, node)
             self.assertEquals(content, chain[0][4])
 
@@ -129,6 +132,11 @@
             lastnode = node
 
         pack = self.createPack(revisions)
+
+        entry = pack.getdelta(filename, revisions[0][1])
+        realvalue = (revisions[0][3], filename, revisions[0][2], {})
+        self.assertEquals(entry, realvalue)
+
        # Test that the chain for the final entry has all the others
        chain = pack.getdeltachain(filename, node)
        for i in range(10):
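
For reviewers, a minimal usage sketch of the store-level API this patch introduces. It is illustrative only and not part of the patch: the describe() helper and the store argument are placeholder names, where store stands in for any of the stores touched above (unioncontentstore, remotefilelogcontentstore, datapackstore, ...), each of which now exposes getdelta(name, node) returning a (delta, deltabasename, deltabasenode, meta) tuple alongside the existing getdeltachain(name, node).

    from mercurial.node import nullid

    def describe(store, name, node):
        # New single-entry API: (delta, deltabasename, deltabasenode, meta).
        delta, deltabasename, deltabasenode, meta = store.getdelta(name, node)
        if deltabasenode == nullid:
            # A nullid delta base means `delta` is already a full text; the
            # remotefilelog and revlog stores in this patch always answer
            # this way.
            return delta, meta
        # A real base (e.g. from a datapack) still needs resolving; the
        # existing chain API covers that case.
        return store.getdeltachain(name, node), meta

As with the existing getdeltachain() path, a miss in every backing store surfaces as KeyError((name, hex(node))).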