diff --git a/rust/hg-core/src/revlog/nodemap.rs b/rust/hg-core/src/revlog/nodemap.rs --- a/rust/hg-core/src/revlog/nodemap.rs +++ b/rust/hg-core/src/revlog/nodemap.rs @@ -17,8 +17,10 @@ }; use std::fmt; +use std::mem; use std::ops::Deref; use std::ops::Index; +use std::slice; #[derive(Debug, PartialEq)] pub enum NodeMapError { @@ -175,6 +177,8 @@ #[derive(Clone, PartialEq)] pub struct Block([RawElement; 16]); +pub const BLOCK_SIZE: usize = mem::size_of::<Block>(); + impl Block { fn new() -> Self { Block([-1; 16]) } @@ -262,6 +266,56 @@ } } + /// Create from an opaque bunch of bytes + /// + /// The `NodeTreeBytes` is created from `buffer`, + /// of which exactly `amount` bytes are used. + /// + /// - `buffer` could be derived from `PyBuffer` and `Mmap` objects. + /// - `offset` allows for the final file format to include fixed data + /// (generation number, behavioural flags) + /// - `amount` is expressed in bytes, and is not automatically derived from + /// `bytes`, so that a caller that manages them atomically can perform + /// temporary disk serializations and still rollback easily if needed. + /// First use-case for this would be to support Mercurial shell hooks. 
+ /// + /// panics if `buffer` is smaller than `amount` pub fn load_bytes( bytes: Box<dyn Deref<Target = [u8]> + Send>, amount: usize, ) -> Self { NodeTree::new(Box::new(NodeTreeBytes::new(bytes, amount))) } + + /// Retrieve added `Block` and the original immutable data + pub fn into_readonly_and_added( + self, + ) -> (Box<dyn Deref<Target = [Block]> + Send>, Vec<Block>) { + let mut vec = self.growable; + let readonly = self.readonly; + if readonly.last() != Some(&self.root) { + vec.push(self.root); + } + (readonly, vec) + } + + /// Retrieve added `Blocks` as bytes, ready to be written to persistent + /// storage + pub fn into_readonly_and_added_bytes( + self, + ) -> (Box<dyn Deref<Target = [Block]> + Send>, Vec<u8>) { + let (readonly, vec) = self.into_readonly_and_added(); + let bytes = unsafe { + Vec::from_raw_parts( + vec.as_ptr() as *mut u8, + vec.len() * BLOCK_SIZE, + vec.capacity() * BLOCK_SIZE, + ) + }; + mem::forget(vec); + (readonly, bytes) + } + /// Total number of blocks fn len(&self) -> usize { self.readonly.len() + self.growable.len() + 1 @@ -410,6 +464,38 @@ } } +pub struct NodeTreeBytes { + buffer: Box<dyn Deref<Target = [u8]> + Send>, + len_in_blocks: usize, +} + +impl NodeTreeBytes { + fn new( + buffer: Box<dyn Deref<Target = [u8]> + Send>, + amount: usize, + ) -> Self { + assert!(buffer.len() >= amount); + let len_in_blocks = amount / BLOCK_SIZE; + NodeTreeBytes { + buffer, + len_in_blocks, + } + } +} + +impl Deref for NodeTreeBytes { + type Target = [Block]; + + fn deref(&self) -> &[Block] { + unsafe { + slice::from_raw_parts( + (&self.buffer).as_ptr() as *const Block, + self.len_in_blocks, + ) + } + } +} + struct NodeTreeVisitor<'n, 'p> { nt: &'n NodeTree, prefix: NodePrefixRef<'p>, @@ -786,4 +872,30 @@ } + + #[test] + fn test_into_added_empty() { + assert!(sample_nodetree().into_readonly_and_added().1.is_empty()); + assert!(sample_nodetree() + .into_readonly_and_added_bytes() + .1 + .is_empty()); + } + + #[test] + fn test_into_added_bytes() -> Result<(), NodeMapError> { + let mut idx = TestNtIndex::new(); + idx.insert(0, "1234")?; + let mut idx = idx.commit(); + 
idx.insert(4, "cafe")?; + let (_, bytes) = idx.nt.into_readonly_and_added_bytes(); + + // only the root block has been changed + assert_eq!(bytes.len(), BLOCK_SIZE); + // big endian for -2 + assert_eq!(&bytes[4..2 * 4], [255, 255, 255, 254]); + // big endian for -6 + assert_eq!(&bytes[12 * 4..13 * 4], [255, 255, 255, 250]); + Ok(()) + } }