
Refined the internal interfaces and added the mark functionality

Chen Chengjun, 1 year ago
Parent
Current commit
990acdff23
8 files changed, with 833 insertions and 282 deletions
  1. src/cow.rs (+28 -29)
  2. src/cursor.rs (+338 -148)
  3. src/entry.rs (+19 -50)
  4. src/lib.rs (+2 -0)
  5. src/mark.rs (+61 -0)
  6. src/node.rs (+169 -32)
  7. src/test.rs (+111 -10)
  8. src/xarray.rs (+105 -13)

+ 28 - 29
src/cow.rs

@@ -1,44 +1,43 @@
 use crate::*;
 
-/// Provide a method for XArray and XNode to check whether copy-on-write is necessary and perform it.
-pub trait CowCheck<I: ItemEntry> {
-    /// By examining the target entry that is about to be operated on,
-    /// perform copy-on-write when the target entry is subject to a mutable operation and is shared with other XArrays.
-    fn copy_on_write<'a>(&'a mut self, entry: &'a XEntry<I>, offset: u8) -> &'a XEntry<I>;
+/// The `Cow` trait provides Copy-On-Write (COW) capability to structures related to XArray,
+/// allowing them to perform COW operations on their internal XEntries.
+pub(crate) trait Cow<I: ItemEntry> {
+    /// Check whether the target entry that is about to be operated on needs to perform COW.
+    /// If the target entry is subject to a mutable operation and is shared with other XArrays,
+    /// perform the COW and return the copied XEntry wrapped in `Some`; otherwise return `None`.
+    fn copy_if_shared(&self, entry: &XEntry<I>) -> Option<XEntry<I>>;
 }
 
-impl<I: ItemEntry> CowCheck<I> for XNodeInner<I> {
-    default fn copy_on_write<'a>(&'a mut self, entry: &'a XEntry<I>, _offset: u8) -> &'a XEntry<I> {
-        entry
+impl<I: ItemEntry> Cow<I> for XNodeInner<I> {
+    default fn copy_if_shared(&self, _entry: &XEntry<I>) -> Option<XEntry<I>> {
+        None
     }
 }
 
-impl<I: ItemEntry + Clone> CowCheck<I> for XNodeInner<I> {
-    fn copy_on_write<'a>(&'a mut self, entry: &'a XEntry<I>, offset: u8) -> &'a XEntry<I> {
-        if entry.is_node() && entry.node_strong_count().unwrap() > 1 {
-            let new_entry = deep_clone_node_entry(entry);
-            let _ = self.set_entry(offset, new_entry);
-            self.entry(offset)
-        } else {
-            entry
-        }
+impl<I: ItemEntry + Clone> Cow<I> for XNodeInner<I> {
+    fn copy_if_shared(&self, entry: &XEntry<I>) -> Option<XEntry<I>> {
+        copy_if_shared(entry)
     }
 }
 
-impl<I: ItemEntry> CowCheck<I> for XArray<I> {
-    default fn copy_on_write<'a>(&'a mut self, entry: &'a XEntry<I>, _offset: u8) -> &'a XEntry<I> {
-        entry
+impl<I: ItemEntry, M: ValidMark> Cow<I> for XArray<I, M> {
+    default fn copy_if_shared(&self, _entry: &XEntry<I>) -> Option<XEntry<I>> {
+        None
     }
 }
 
-impl<I: ItemEntry + Clone> CowCheck<I> for XArray<I> {
-    fn copy_on_write<'a>(&'a mut self, entry: &'a XEntry<I>, _offset: u8) -> &'a XEntry<I> {
-        if entry.is_node() && entry.node_strong_count().unwrap() > 1 {
-            let new_entry = deep_clone_node_entry(entry);
-            let _ = self.set_head(new_entry);
-            self.head()
-        } else {
-            entry
-        }
+impl<I: ItemEntry + Clone, M: ValidMark> Cow<I> for XArray<I, M> {
+    fn copy_if_shared(&self, entry: &XEntry<I>) -> Option<XEntry<I>> {
+        copy_if_shared(entry)
+    }
+}
+
+fn copy_if_shared<I: ItemEntry + Clone>(entry: &XEntry<I>) -> Option<XEntry<I>> {
+    if entry.is_node() && entry.node_strong_count().unwrap() > 1 {
+        let new_entry = deep_clone_node_entry(entry);
+        Some(new_entry)
+    } else {
+        None
     }
 }
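
To make the copy-if-shared behavior concrete, below is a minimal usage sketch; it assumes the crate is imported as `xarray` and that `store`, `remove`, and the COW `Clone` impl behave as described in src/xarray.rs, mirroring the intent of `test_cow` in src/test.rs rather than adding a new test.

use std::sync::Arc;
use xarray::XArray; // assumed crate path

fn main() {
    let mut original: XArray<Arc<u64>> = XArray::new();
    original.store(17, Arc::new(17));

    // The clone initially shares the head node with the original (no deep copy yet).
    let mut cloned = original.clone();

    // Writing through the clone triggers copy-if-shared on the path to index 17,
    // so the original's nodes are left untouched.
    cloned.store(17, Arc::new(1700));

    assert_eq!(*original.remove(17).unwrap(), 17);
    assert_eq!(*cloned.remove(17).unwrap(), 1700);
}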

+ 338 - 148
src/cursor.rs

@@ -1,53 +1,141 @@
+use crate::*;
 use std::marker::PhantomData;
+use std::ops::{Deref, DerefMut};
+use std::sync::Arc;
 
-use crate::*;
+/// CursorState represents the current state of the cursor. Currently, there are two possible states:
+/// 1. inactive: the initial state where the cursor is not positioned on any node.
+/// 2. positioned on a node: this state includes information about the node the cursor is on,
+/// as well as the offset of the entry that needs to be operated on within the slots of the current node.
+enum CursorState<'a, I, Operation>
+where
+    I: ItemEntry,
+{
+    Inactive,
+    AtNode {
+        node: &'a XNode<I, Operation>,
+        operation_offset: u8,
+    },
+}
+
+impl<'a, I: ItemEntry, Operation> CursorState<'a, I, Operation> {
+    fn default() -> Self {
+        Self::Inactive
+    }
+
+    fn arrive_node(&mut self, node: &'a XNode<I, Operation>, operation_offset: u8) {
+        *self = Self::AtNode {
+            node,
+            operation_offset,
+        };
+    }
+
+    fn is_inactive(&self) -> bool {
+        matches!(self, Self::Inactive)
+    }
+
+    fn is_at_node(&self) -> bool {
+        matches!(
+            self,
+            Self::AtNode {
+                node: _,
+                operation_offset: _
+            }
+        )
+    }
+
+    fn node_info(&self) -> Option<(&'a XNode<I, Operation>, u8)> {
+        if let Self::AtNode {
+            node,
+            operation_offset,
+        } = self
+        {
+            Some((node, *operation_offset))
+        } else {
+            None
+        }
+    }
+}
 
 /// A `Cursor` can traverse in the `XArray` and have a target operated `XEntry`, which is stored in the `index` of `XArray`.
 /// `Cursor` can be only created by an `XArray`, and will hold its immutable reference, and can only perform read-only operations
 /// for the corresponding `XArray`.
 /// When a cursor traverses an XArray, at any given moment, it is positioned on an XNode. If not, it means that
-/// the cursor has not begin to traverse. Its member `offset` indicates the next XEntry it will move to, which is the `slots[offset]` inside the current XNode.
+/// the cursor has not begun to traverse.
 ///
-/// At the same time, multiple Cursors are allowed to operate on a single XArray.
-pub(crate) struct Cursor<'a, I>
+/// After traversing, the `Cursor` will arrive at a node where the target entry can be operated on. If the arrival fails,
+/// it will be reset to its initial state, meaning it is not positioned at any node. Therefore, if the `Cursor` is not
+/// in the midst of a traversal operation, it is either not yet started or it has already reached a node where the
+/// target entry can be acted upon.
+///
+/// Multiple Cursors are allowed to operate on a single XArray at the same time.
+///
+/// TODO: Implement `next()` to allow changing the target index in cursors.
+pub(crate) struct Cursor<'a, I, M>
 where
     I: ItemEntry,
+    M: ValidMark,
 {
     /// The `XArray` the cursor located in.
-    xa: &'a XArray<I>,
+    xa: &'a XArray<I, M>,
     /// The target index of the cursor in the belonged `XArray`.
     index: u64,
-    /// The next XEntry to be operated on is at 'offset' in the slots of the current XNode.
-    offset: u8,
-    /// Current positioned XNode.
-    current_node: Option<&'a XNode<I, ReadOnly>>,
+    /// Represents the current state of the cursor.
+    state: CursorState<'a, I, ReadOnly>,
+
     _marker: PhantomData<I>,
 }
 
-impl<'a, I: ItemEntry> Cursor<'a, I> {
+impl<'a, I: ItemEntry, M: ValidMark> Cursor<'a, I, M> {
     /// Create an `Cursor` to perform read related operations on the `XArray`.
-    pub(crate) fn new(xa: &'a XArray<I>, index: u64) -> Self {
+    pub(crate) fn new(xa: &'a XArray<I, M>, index: u64) -> Self {
         Self {
             xa,
             index,
-            offset: 0,
-            current_node: None,
+            state: CursorState::default(),
             _marker: PhantomData,
         }
     }
 
+    /// Obtain a reference to the XEntry from a pointer pointing to it.
+    ///
+    /// # Safety
+    /// The user must ensure that the pointer remains valid for the duration of use of the target XEntry reference.
+    unsafe fn ref_entry(&self, entry_ptr: *const XEntry<I>) -> &'a XEntry<I> {
+        self.xa.ref_entry(entry_ptr)
+    }
+
+    /// Obtain a reference to the XEntry in the slots of the target node. The input `offset` indicates
+    /// the offset of the XEntry in the slots.
+    fn ref_node_entry(&self, node: &'a XNode<I, ReadOnly>, offset: u8) -> &'a XEntry<I> {
+        let target_entry_ptr = node.entry(offset);
+        // Safety: The returned entry has the same lifetime with the XNode that owns it.
+        // Hence the position that `target_entry_ptr` points to will be valid during the usage of returned reference.
+        unsafe { self.ref_entry(target_entry_ptr) }
+    }
+
     /// Move the `Cursor` to the `XNode` that `node_entry` points to, and update the cursor's state based on its target index.
     /// Return a reference to the `XEntry` within the slots of the current XNode that needs to be operated on.
-    fn move_to(&mut self, node_entry: &'a XEntry<I>) -> Option<RefEntry<'a, I>> {
-        if let Some(node) = node_entry.as_node() {
-            let (current_entry, offset) = {
-                let offset = node.entry_offset(self.index);
-                let current_entry = node.entry(offset);
-                (current_entry, offset)
-            };
-            self.current_node = Some(node);
-            self.offset = offset;
-            Some(current_entry)
+    fn move_to(&mut self, node: &'a XNode<I, ReadOnly>) -> Option<&'a XEntry<I>> {
+        let (current_entry, offset) = {
+            let offset = node.entry_offset(self.index);
+            let current_entry = self.ref_node_entry(node, offset);
+            (current_entry, offset)
+        };
+        self.state.arrive_node(node, offset);
+        Some(current_entry)
+    }
+
+    /// Check whether the target item is marked with the input `mark`.
+    /// If the target item does not exist, the function returns `None`.
+    pub(crate) fn is_marked(&mut self, mark: M) -> Option<bool> {
+        self.traverse_to_target();
+        if let CursorState::AtNode {
+            operation_offset,
+            node,
+        } = self.state
+        {
+            Some(node.is_marked(operation_offset, mark.index()))
         } else {
             None
         }
@@ -58,119 +146,117 @@ impl<'a, I: ItemEntry> Cursor<'a, I> {
     /// Returns a reference to the `XEntry` at the target index if succeed.
     /// If the cursor cannot reach to the target index, the method will return `None`.
     pub(crate) fn load(&mut self) -> Option<&'a XEntry<I>> {
-        if let Some(node) = self.xa.head().as_node() {
-            if (self.index >> node.height()) as u64 > SLOT_MASK as u64 {
-                self.current_node = None;
-                return None;
-            }
-        } else {
+        self.traverse_to_target()
+    }
+
+    /// Traverse the subtree and move to the node that can operate the target entry.
+    /// It then returns the reference to the `XEntry` stored in the slot corresponding to the target index.
+    /// A target operated XEntry must be an item entry.
+    /// If the target entry cannot be reached, the function returns `None`.
+    fn traverse_to_target(&mut self) -> Option<&'a XEntry<I>> {
+        if self.is_arrived() {
+            let (current_node, operation_offset) = self.state.node_info().unwrap();
+            return Some(self.ref_node_entry(current_node, operation_offset));
+        }
+
+        let max_index = self.xa.max_index();
+        if max_index < self.index || max_index == 0 {
             return None;
         }
+        self.move_to(self.xa.head().as_node().unwrap());
 
-        // # Safety
-        // Because there won't be another concurrent modification operation, the `current_entry` is valid.
-        let mut current_entry = RefEntry::<'a>::new(self.xa.head());
-        while let Some(node) = unsafe { current_entry.as_entry().as_node() } {
-            if node.height() == 0 {
-                break;
+        let (current_node, operation_offset) = self.state.node_info().unwrap();
+        let mut current_layer = current_node.layer();
+        let mut operated_entry = self.ref_node_entry(current_node, operation_offset);
+        while current_layer > 0 {
+            if let None = operated_entry.as_node() {
+                self.init();
+                return None;
             }
-            current_entry = unsafe { self.move_to(current_entry.as_entry()).unwrap() };
-        }
-        unsafe {
-            self.move_to(current_entry.as_entry())
-                .map(|ref_entry| ref_entry.as_entry())
+
+            *current_layer -= 1;
+            operated_entry = self.move_to(operated_entry.as_node().unwrap()).unwrap();
         }
+        Some(operated_entry)
     }
 
     /// Initialize the Cursor to its initial state.
     fn init(&mut self) {
-        self.current_node = None;
-        self.offset = 0;
+        self.state = CursorState::default();
     }
 
     /// Return the target index of the cursor.
     fn index(&mut self) -> u64 {
         self.index
     }
+
+    /// Determine whether the cursor has arrived at the node that can operate the target entry.
+    /// It can only be used before or after traversing: when not in the middle of a traversal, the cursor has either
+    /// not yet started or has already reached the target node, so checking whether it is positioned on a node suffices.
+    fn is_arrived(&mut self) -> bool {
+        self.state.is_at_node()
+    }
 }
 
 /// A `CursorMut` can traverse in the `XArray` and have a target operated `XEntry`, which is stored in the `index` of `XArray`.
 /// `Cursor` can be only created by an `XArray`, and will hold its mutable reference, and can perform read and write operations
 /// for the corresponding `XArray`.
 /// When a cursor traverses an XArray, at any given moment, it is positioned on an XNode. If not, it means that
-/// the cursor has not begin to traverse. Its member `offset` indicates the next XEntry it will move to, which is the `slots[offset]` inside the current XNode.
+/// the cursor has not begun to traverse.
+///
+/// After traversing, the `CursorMut` will arrive at a node where the target entry can be operated on. If the arrival fails,
+/// it will be reset to its initial state, meaning it is not positioned at any node. Therefore, if the `CursorMut` is not
+/// in the midst of a traversal operation, it is either not yet started or it has already reached a node where the
+/// target entry can be acted upon.
 ///
 /// When a CursorMut doing operation on XArray, it should not be affected by other CursorMuts or affect other Cursors.
-pub(crate) struct CursorMut<'a, I>
+///
+/// TODO: Implement `next()` to allow changing the target index in cursors.
+pub(crate) struct CursorMut<'a, I, M>
 where
     I: ItemEntry,
+    M: ValidMark,
 {
     /// The `XArray` the cursor located in.
-    xa: &'a mut XArray<I>,
+    xa: &'a mut XArray<I, M>,
     /// The target index of the cursor in the belonged `XArray`.
     index: u64,
-    /// The next XEntry to be operated on is at 'offset' in the slots of the current XNode.
-    offset: u8,
-    /// Current positioned XNode.
-    current_node: Option<&'a XNode<I, ReadWrite>>,
+    /// Represents the current state of the cursor.
+    state: CursorState<'a, I, ReadWrite>,
+
     _marker: PhantomData<I>,
 }
 
-impl<'a, I: ItemEntry> CursorMut<'a, I> {
+impl<'a, I: ItemEntry, M: ValidMark> CursorMut<'a, I, M> {
     /// Create an `CursorMut` to perform read and write operations on the `XArray`.
-    pub(crate) fn new(xa: &'a mut XArray<I>, index: u64) -> Self {
+    pub(crate) fn new(xa: &'a mut XArray<I, M>, index: u64) -> Self {
         Self {
             xa,
             index,
-            offset: 0,
-            current_node: None,
+            state: CursorState::default(),
             _marker: PhantomData,
         }
     }
 
-    /// Move the `CursorMut` to the `XNode` that `node_entry` points to, and update the cursor's state based on its target index.
-    /// Return a reference to the `XEntry` within the slots of the current XNode that needs to be operated on next.
-    fn move_to(&mut self, node_entry: &'a XEntry<I>) -> Option<RefEntry<'a, I>> {
-        if let Some(node) = node_entry.as_node_mut() {
-            let (current_entry, offset) = {
-                let offset = node.entry_offset(self.index);
-                let current_entry = node.entry(offset);
-                (current_entry, offset)
-            };
-            self.current_node = Some(node);
-            self.offset = offset;
-            Some(current_entry)
-        } else {
-            None
-        }
+    /// Obtain a reference to the XEntry in the slots of the target node. The input `offset` indicates
+    /// the offset of the XEntry in the slots.
+    fn ref_node_entry(&self, node: &'a XNode<I, ReadWrite>, offset: u8) -> &'a XEntry<I> {
+        let target_entry_ptr = node.entry(offset);
+        // Safety: The returned entry has the same lifetime with the XNode that owns it.
+        // Hence the position that `target_entry_ptr` points to will be valid during the usage of returned reference.
+        unsafe { self.ref_entry(target_entry_ptr) }
     }
 
-    /// Load the `XEntry` at the current cursor index within the `XArray`.
-    ///
-    /// Returns a reference to the `XEntry` at the target index if succeed.
-    /// If the cursor cannot reach to the target index, the method will return `None`.
-    pub(crate) fn load(&mut self) -> Option<&'a XEntry<I>> {
-        if let Some(node) = self.xa.head().as_node() {
-            if (self.index >> node.height()) as u64 > SLOT_MASK as u64 {
-                self.current_node = None;
-                return None;
-            }
-        } else {
-            return None;
-        }
-        // # Safety
-        // Because there won't be another concurrent modification operation, the `current_entry` is valid.
-        let mut current_entry = RefEntry::<'a>::new(self.xa.head());
-        while let Some(node) = unsafe { current_entry.as_entry().as_node() } {
-            if node.height() == 0 {
-                break;
-            }
-            current_entry = unsafe { self.move_to(current_entry.as_entry()).unwrap() };
-        }
-        unsafe {
-            self.move_to(current_entry.as_entry())
-                .map(|ref_entry| ref_entry.as_entry())
-        }
+    /// Move the `CursorMut` to the `XNode` that `node_entry` points to, and update the cursor's state based on its target index.
+    /// Return a reference to the `XEntry` within the slots of the current XNode that needs to be operated on next.
+    fn move_to(&mut self, node: &'a XNode<I, ReadWrite>) -> Option<&'a XEntry<I>> {
+        let (current_entry, offset) = {
+            let offset = node.entry_offset(self.index);
+            let current_entry = self.ref_node_entry(node, offset);
+            (current_entry, offset)
+        };
+        self.state.arrive_node(node, offset);
+        Some(current_entry)
     }
 
     /// Stores the provided `XEntry` in the `XArray` at the position indicated by the current cursor index.
@@ -179,15 +265,65 @@ impl<'a, I: ItemEntry> CursorMut<'a, I> {
     /// the method returns the provided entry without making changes.
     /// Otherwise, it replaces the current entry with the provided one and returns the old entry.
     pub(crate) fn store(&mut self, entry: XEntry<I>) -> XEntry<I> {
-        let current_entry = self.traverse();
-        if entry.raw() == current_entry.raw() {
+        let target_entry = self.traverse_to_target_mut();
+        if entry.raw() == target_entry.raw() {
             return entry;
         }
-        let node = self.current_node.unwrap();
-        let old_entry = node.set_entry(self.offset, entry);
+        let (current_node, operation_offset) = self.state.node_info().unwrap();
+        let old_entry = current_node.set_entry(operation_offset, entry);
         return old_entry;
     }
 
+    /// Mark the item at the target index in the `XArray` with the input `mark`.
+    /// If the item does not exist, return an Error.
+    ///
+    /// This operation will also mark all nodes along the path from the head node to the target node with the input `mark`,
+    /// because a marked intermediate node should be equivalent to having a child node that is marked.
+    pub(crate) fn set_mark(&mut self, mark: M) -> Result<(), ()> {
+        self.traverse_to_target();
+        if let Some((current_node, operation_offset)) = self.state.node_info() {
+            current_node.set_mark(operation_offset, mark.index());
+            let mut offset_in_parent = current_node.offset_in_parent();
+            let mut parent = current_node.parent();
+            while let Some(parent_node) = parent {
+                if parent_node.is_marked(offset_in_parent, mark.index()) {
+                    break;
+                }
+                parent_node.set_mark(offset_in_parent, mark.index());
+                offset_in_parent = parent_node.offset_in_parent();
+                parent = parent_node.parent();
+            }
+            Ok(())
+        } else {
+            Err(())
+        }
+    }
+
+    /// Unset the input `mark` for the item at the target index in the `XArray`.
+    /// If the item does not exist, return an Error.
+    ///
+    /// This operation will also unset the input `mark` for all nodes along the path from the head node to the target node
+    /// if the input `mark` has not marked any of their children.
+    pub(crate) fn unset_mark(&mut self, mark: M) -> Result<(), ()> {
+        self.traverse_to_target();
+        if let Some((mut current_node, operation_offset)) = self.state.node_info() {
+            current_node.unset_mark(operation_offset, mark.index());
+            while current_node.is_mark_clear(mark.index()) {
+                let offset_in_parent = current_node.offset_in_parent();
+                let parent = current_node.parent();
+                if let Some(parent_node) = parent {
+                    parent_node.unset_mark(offset_in_parent, mark.index());
+                    current_node = parent_node;
+                } else {
+                    break;
+                }
+            }
+            Ok(())
+        } else {
+            Err(())
+        }
+    }
+
     /// Removes the `XEntry` at the target index of the 'CursorMut' within the `XArray`.
     ///
     /// This is achieved by storing an empty `XEntry` at the target index using the `store` method.
@@ -196,88 +332,142 @@ impl<'a, I: ItemEntry> CursorMut<'a, I> {
         self.store(XEntry::EMPTY)
     }
 
-    /// Traverse the subtree based on the target index starting from the head node.
-    /// Move continuously until reaching the `XNode` capable of storing the target index.
+    /// Traverse the subtree and move to the node that can operate the target entry.
     /// It then returns the reference to the `XEntry` stored in the slot corresponding to the target index.
     /// A target operated XEntry must be an item entry.
+    /// If the target entry cannot be reached, the function returns `None`.
+    fn traverse_to_target(&mut self) -> Option<&'a XEntry<I>> {
+        if self.is_arrived() {
+            let (current_node, operation_offset) = self.state.node_info().unwrap();
+            return Some(self.ref_node_entry(current_node, operation_offset));
+        }
+
+        let max_index = self.xa.max_index();
+        if max_index < self.index || max_index == 0 {
+            return None;
+        }
+        let head = self.xa.head_mut().as_node_mut().unwrap();
+        self.move_to(head);
+
+        let (current_node, operation_offset) = self.state.node_info().unwrap();
+        let mut current_layer = current_node.layer();
+        let mut operated_entry = self.ref_node_entry(current_node, operation_offset);
+        while current_layer > 0 {
+            if let None = operated_entry.as_node() {
+                self.init();
+                return None;
+            }
+
+            *current_layer -= 1;
+            operated_entry = self.move_to(operated_entry.as_node_mut().unwrap()).unwrap();
+        }
+        Some(operated_entry)
+    }
+
+    /// Traverse the subtree and move to the node that can operate the target entry.
+    /// During the traversal, the cursor may modify the XArray so that it can reach the target node.
     ///
-    /// Before traverse, the cursor will first expand the height of `XArray` to make sure it have enough capacity.
+    /// Before traversing, the cursor will first expand the layers of `XArray` to make sure it has enough capacity.
     /// During the traverse, the cursor will allocate new `XNode` and put it in the appropriate slot if needed.
-    fn traverse(&mut self) -> &'a XEntry<I> {
-        let mut current_height = self.expand_height();
-        let mut ref_entry = RefEntry::<'a>::new(self.xa.head_mut());
-        // When the target entry has not been reached, the cursor will continue to move downward,
-        // and if it encounters a situation where there is no XNode, it will allocate an XNode.
-        //
-        // # Safety
-        // Because there won't be another concurrent modification operation, the `ref_entry` is valid.
-        while current_height > 0 {
-            let current_entry = unsafe { self.move_to(ref_entry.as_entry()).unwrap().as_entry() };
-            current_height -= NODE_HEIGHT as u8;
-            if let None = current_entry.as_node() {
+    ///
+    /// It then returns the reference to the `XEntry` stored in the slot corresponding to the target index.
+    /// A target operated XEntry must be an item entry.
+    fn traverse_to_target_mut(&mut self) -> &'a XEntry<I> {
+        if self.is_arrived() {
+            let (current_node, operation_offset) = self.state.node_info().unwrap();
+            return self.ref_node_entry(current_node, operation_offset);
+        }
+
+        self.expand_layer();
+        let head_ref = self.xa.head_mut().as_node_mut().unwrap();
+        self.move_to(head_ref);
+
+        let (current_node, operation_offset) = self.state.node_info().unwrap();
+        let mut current_layer = current_node.layer();
+        let mut operated_entry = self.ref_node_entry(current_node, operation_offset);
+        while current_layer > 0 {
+            if let None = operated_entry.as_node() {
                 let new_entry = {
-                    let new_owned_entry = self.alloc_node(current_height, self.offset);
-                    let node = self.current_node.unwrap();
-                    let _ = node.set_entry(self.offset, new_owned_entry);
-                    node.entry(self.offset)
+                    let (current_node, operation_offset) = self.state.node_info().unwrap();
+                    let new_owned_entry = self.alloc_node(
+                        Layer::new(*current_layer - 1),
+                        operation_offset,
+                        Some(current_node),
+                    );
+                    let _ = current_node.set_entry(operation_offset, new_owned_entry);
+                    self.ref_node_entry(current_node, operation_offset)
                 };
-                ref_entry = new_entry;
-            } else {
-                ref_entry = RefEntry::<'a>::new(current_entry);
+                operated_entry = new_entry;
             }
+            *current_layer -= 1;
+            operated_entry = self.move_to(operated_entry.as_node_mut().unwrap()).unwrap();
         }
-        let k = unsafe { self.move_to(ref_entry.as_entry()).unwrap().as_entry() };
-        k
+        operated_entry
     }
 
-    /// Increase the height of XArray to expand its capacity, allowing it to accommodate the target index,
-    /// and returns the height of the final head node.
+    /// Increase the number of layers of the XArray to expand its capacity, allowing it to accommodate the target index,
+    /// and return the layer of the final head node.
     ///
-    /// If the head node of the XArray does not exist, allocate a new head node of appropriate height directly.
+    /// If the head node of the XArray does not exist, allocate a new head node of appropriate layer directly.
     /// Otherwise, if needed, repeatedly insert new nodes on top of the current head node to serve as the new head.
-    fn expand_height(&mut self) -> u8 {
+    fn expand_layer(&mut self) -> Layer {
         if self.xa.head().is_null() {
-            let mut head_height = 0;
-            while (self.index >> head_height) as usize >= SLOT_SIZE {
-                head_height += NODE_HEIGHT as u8;
+            let mut head_layer = Layer::new(0);
+            while self.index > head_layer.max_index() {
+                *head_layer += 1;
             }
-            let head = self.alloc_node(head_height, 0);
+            let head = self.alloc_node(head_layer, 0, None);
             self.xa.set_head(head);
-            return head_height;
+            return head_layer;
         } else {
             loop {
-                let (capacity, head_height) = {
+                let head_layer = {
                     let head = self.xa.head().as_node().unwrap();
-                    (head.max_index(), head.height())
+                    head.layer()
                 };
 
-                if capacity > self.index {
-                    return head_height;
+                if head_layer.max_index() > self.index {
+                    return head_layer;
                 }
 
-                let new_node = self.alloc_node(head_height + NODE_HEIGHT as u8, 0);
-                let old_head_entry = self.xa.set_head(new_node);
-
+                let new_node_entry = self.alloc_node(Layer::new(*head_layer + 1), 0, None);
+                let old_head_entry = self.xa.set_head(new_node_entry);
+                let old_head = old_head_entry.as_node_mut().unwrap();
                 let new_head = self.xa.head_mut().as_node_mut().unwrap();
+                old_head.set_parent(new_head);
                 let _empty = new_head.set_entry(0, old_head_entry);
             }
         }
     }
 
-    /// Allocate a new XNode with the specified height and offset,
+    /// Allocate a new XNode with the specified layer and offset,
     /// then generate a node entry from it and return it to the caller.
-    fn alloc_node(&mut self, height: u8, offset: u8) -> XEntry<I> {
-        XEntry::from_node(XNode::<I, ReadWrite>::new(height, offset))
+    fn alloc_node(
+        &mut self,
+        layer: Layer,
+        offset: u8,
+        parent: Option<&XNode<I, ReadWrite>>,
+    ) -> XEntry<I> {
+        let parent = parent.map(|p| {
+            let arc = unsafe { Arc::from_raw(p as *const XNode<I, ReadWrite>) };
+            let weak = Arc::downgrade(&arc);
+            core::mem::forget(arc);
+            weak
+        });
+        XEntry::from_node(XNode::<I, ReadWrite>::new(layer, offset, parent))
     }
+}
 
-    /// Initialize the Cursor to its initial state.
-    fn init(&mut self) {
-        self.current_node = None;
-        self.offset = 0;
+impl<'a, I: ItemEntry, M: ValidMark> Deref for CursorMut<'a, I, M> {
+    type Target = Cursor<'a, I, M>;
+
+    fn deref(&self) -> &Self::Target {
+        unsafe { &*(self as *const CursorMut<'a, I, M> as *const Cursor<'a, I, M>) }
     }
+}
 
-    /// Return the target index of the cursor.
-    pub(crate) fn index(&mut self) -> u64 {
-        self.index
+impl<'a, I: ItemEntry, M: ValidMark> DerefMut for CursorMut<'a, I, M> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        unsafe { &mut *(self as *const CursorMut<'a, I, M> as *mut Cursor<'a, I, M>) }
     }
 }
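
To make the capacity arithmetic behind `expand_layer` and `Layer::max_index` concrete, here is a small self-contained sketch (not part of the crate) that re-states the same computation with BITS_PER_LAYER = 6 and SLOT_SIZE = 64; the function names are illustrative only.

const BITS_PER_LAYER: u64 = 6;
const SLOT_SIZE: u64 = 1 << BITS_PER_LAYER; // 64 slots per node

/// Maximum index representable by a head node at `layer`, mirroring `Layer::max_index`.
fn max_index(layer: u8) -> u64 {
    (SLOT_SIZE << (layer as u64 * BITS_PER_LAYER)) - 1
}

/// Layer the head node must have so that `index` fits, mirroring the loop in `expand_layer`.
fn required_layer(index: u64) -> u8 {
    let mut layer = 0u8;
    while index > max_index(layer) {
        layer += 1;
    }
    layer
}

fn main() {
    assert_eq!(max_index(0), 63); // a single leaf node covers indices 0..=63
    assert_eq!(max_index(1), 64 * 64 - 1); // one extra layer covers 0..=4095
    assert_eq!(required_layer(63), 0);
    assert_eq!(required_layer(64), 1);
    assert_eq!(required_layer(10_000), 2); // 10_000 > 4095 but <= 262_143
}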

+ 19 - 50
src/entry.rs

@@ -1,17 +1,19 @@
-use core::{marker::PhantomData, mem::ManuallyDrop, ptr::NonNull};
-use std::sync::{Arc, Mutex};
+use core::marker::PhantomData;
+use std::{mem::ManuallyDrop, sync::Arc};
 
 use crate::*;
 
 /// A trait that should be implemented for the types users wish to store in an `XArray`.
-/// Items stored in an XArray are required to be 4 bytes in size, Currently it can be various pointer types.
-pub trait ItemEntry {
+/// Items stored in an XArray are required to be 8 bytes in size; currently they can be various pointer types.
+///
+/// # Safety
+/// Users must ensure that the `usize` produced by `into_raw()` meets the requirements for an item entry in the XArray. Specifically,
+/// if the original type is a pointer, the last two bits should be 00; if the original
+/// type is a value like usize, the last bit should be 1 (TODO).
+pub unsafe trait ItemEntry {
     /// Converts the original type into a `usize`, consuming the ownership of the original type.
     ///
-    /// This `usize` should be directly stored in an XArray's XEntry. Users must ensure that the
-    /// produced `usize` meets the requirements for an item entry in the XArray. Specifically,
-    /// if the original type is a pointer, the last two bits should be 00; if the original
-    /// type is a value like usize, the last bit should be 1 (TODO).
+    /// This `usize` should be directly stored in an XArray's XEntry.
     fn into_raw(self) -> usize;
 
     /// Recovers the original type from a usize, reclaiming ownership.
@@ -22,7 +24,7 @@ pub trait ItemEntry {
     unsafe fn from_raw(raw: usize) -> Self;
 }
 
-impl<T> ItemEntry for Arc<T> {
+unsafe impl<T> ItemEntry for Arc<T> {
     fn into_raw(self) -> usize {
         let raw_ptr = unsafe { core::intrinsics::transmute::<Arc<T>, *const u8>(self) };
         debug_assert!(raw_ptr.is_aligned_to(4));
@@ -35,7 +37,7 @@ impl<T> ItemEntry for Arc<T> {
     }
 }
 
-impl<T> ItemEntry for Box<T> {
+unsafe impl<T> ItemEntry for Box<T> {
     fn into_raw(self) -> usize {
         let raw_ptr = Box::into_raw(self) as *const u8;
         debug_assert!(raw_ptr.is_aligned_to(4));
@@ -92,7 +94,7 @@ impl<I: ItemEntry + Clone> Clone for XEntry<I> {
         } else {
             if self.is_node() {
                 unsafe {
-                    Arc::increment_strong_count((self.raw - 2) as *const Mutex<XNode<I>>);
+                    Arc::increment_strong_count((self.raw - 2) as *const XNode<I>);
                 }
             }
             Self {
@@ -114,9 +116,9 @@ impl<I: ItemEntry> XEntry<I> {
         self.raw
     }
 
-    pub(crate) const EMPTY: Self = Self::new(0);
+    pub(crate) const EMPTY: Self = unsafe { Self::new(0) };
 
-    pub(crate) const fn new(raw: usize) -> Self {
+    pub(crate) const unsafe fn new(raw: usize) -> Self {
         Self {
             raw,
             _marker: PhantomData,
@@ -141,13 +143,13 @@ impl<I: ItemEntry> XEntry<I> {
 
     pub(crate) fn from_item(item: I) -> Self {
         let raw = I::into_raw(item);
-        Self::new(raw as usize)
+        unsafe { Self::new(raw as usize) }
     }
 
     pub(crate) fn into_item(self) -> Option<I> {
         if self.is_item() {
             let item = unsafe { I::from_raw(self.raw) };
-            let _ = ManuallyDrop::new(self);
+            core::mem::forget(self);
             Some(item)
         } else {
             None
@@ -159,7 +161,7 @@ impl<I: ItemEntry> XEntry<I> {
             let arc_node = Arc::new(node);
             Arc::into_raw(arc_node)
         };
-        Self::new(node_ptr as usize | 2)
+        unsafe { Self::new(node_ptr as usize | 2) }
     }
 
     pub(crate) fn as_node(&self) -> Option<&XNode<I>> {
@@ -190,7 +192,7 @@ impl<I: ItemEntry> XEntry<I> {
             unsafe {
                 let arc = Arc::from_raw(raw_ptr);
                 let strong_count = Arc::strong_count(&arc);
-                let _ = ManuallyDrop::new(arc);
+                core::mem::forget(arc);
                 Some(strong_count)
             }
         } else {
@@ -199,38 +201,5 @@ impl<I: ItemEntry> XEntry<I> {
     }
 }
 
-/// An immutable reference to an `XEntry` stored in the `head` of `XArray` or the slots of `XNode` with a lifetime `'a`.
-///
-/// It can be used as `&'a XEntry` during the `'a` lifetime through `as_entry()` method.  
-pub(crate) struct RefEntry<'a, I>
-where
-    I: ItemEntry,
-{
-    ref_ptr: NonNull<XEntry<I>>,
-    _marker: &'a (),
-}
-
-impl<'a, I: ItemEntry> RefEntry<'a, I> {
-    /// Create a `RefEntry` from an input `entry`. The lifetime of `entry` may be shorter than `'a`
-    /// since the `entry` may reference to a lock guard.
-    pub(crate) fn new(entry: &XEntry<I>) -> Self {
-        Self {
-            ref_ptr: NonNull::new(entry as *const XEntry<I> as *mut XEntry<I>).unwrap(),
-            _marker: &(),
-        }
-    }
-
-    /// Return as an `&'a XEntry`.
-    ///
-    /// # Safety
-    /// Ensure that during the lifetime of the `&'a XEntry`, no one modifies the referenced content.
-    pub(crate) unsafe fn as_entry(&self) -> &'a XEntry<I> {
-        &*self.ref_ptr.as_ptr()
-    }
-}
-
 unsafe impl<I: ItemEntry + Sync> Sync for XEntry<I> {}
 unsafe impl<I: ItemEntry + Send> Send for XEntry<I> {}
-
-unsafe impl<'a, I: ItemEntry + Sync> Sync for RefEntry<'a, I> {}
-unsafe impl<'a, I: ItemEntry + Send> Send for RefEntry<'a, I> {}
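
Because `ItemEntry` is now an unsafe trait, a downstream implementor has to uphold the low-bit invariant itself. The following is a hypothetical sketch of such an implementation; the trait is re-stated locally so the snippet compiles on its own, and the `MyItem` type is illustrative, not part of this commit.

use std::sync::Arc;

// Re-statement of the trait from src/entry.rs so this sketch is self-contained.
unsafe trait ItemEntry {
    fn into_raw(self) -> usize;
    unsafe fn from_raw(raw: usize) -> Self;
}

struct MyItem(Arc<String>);

// Safety: `Arc::into_raw` yields a pointer into a heap allocation whose alignment
// is at least 4, so the low two bits of the produced usize are 00 as required.
unsafe impl ItemEntry for MyItem {
    fn into_raw(self) -> usize {
        let ptr = Arc::into_raw(self.0);
        debug_assert!(ptr as usize % 4 == 0);
        ptr as usize
    }

    unsafe fn from_raw(raw: usize) -> Self {
        MyItem(Arc::from_raw(raw as *const String))
    }
}

fn main() {
    let raw = MyItem(Arc::new(String::from("hello"))).into_raw();
    // Round-trip recovers ownership; forgetting this call would leak the allocation.
    let item = unsafe { MyItem::from_raw(raw) };
    assert_eq!(item.0.as_str(), "hello");
}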

+ 2 - 0
src/lib.rs

@@ -5,12 +5,14 @@
 use cow::*;
 use cursor::*;
 use entry::*;
+use mark::*;
 use node::*;
 pub use xarray::*;
 
 mod cow;
 mod cursor;
 mod entry;
+mod mark;
 mod node;
 mod xarray;
 

+ 61 - 0
src/mark.rs

@@ -0,0 +1,61 @@
+#[derive(Debug, Clone, Copy)]
+/// A mark can be used to indicate which slots in an XNode contain items that have been marked.
+/// It internally stores a u64, functioning as a bitmap,
+/// where each bit that is set to 1 represents a slot at the corresponding offset that has been marked.
+pub(crate) struct Mark {
+    inner: u64,
+}
+
+impl Mark {
+    pub(crate) const EMPTY: Self = Self::new(0);
+
+    pub(crate) const fn new(inner: u64) -> Self {
+        Self { inner }
+    }
+
+    pub(crate) fn set(&mut self, offset: u8) {
+        self.inner |= 1 << offset as u64;
+    }
+
+    pub(crate) fn unset(&mut self, offset: u8) {
+        self.inner &= !(1 << offset as u64);
+    }
+
+    pub(crate) fn clear(&mut self) {
+        self.inner = 0
+    }
+
+    pub(crate) fn is_marked(&self, offset: u8) -> bool {
+        (self.inner & 1 << offset as u64) != 0
+    }
+
+    pub(crate) fn is_clear(&self) -> bool {
+        self.inner == 0
+    }
+}
+
+// In XArray, an item can have up to three different marks. Users can use a type to distinguish
+// which kind of mark they want to set. Such a type must implement the `ValidMark` trait,
+// meaning it should be convertible to an index in the range of 0 to 2.
+pub trait ValidMark: Copy + Clone {
+    /// Map the self type to an index in the range 0 to 2.
+    fn index_raw(&self) -> usize;
+
+    /// Users are not required to implement this; it ensures that the mapped index does not exceed 2.
+    fn index(&self) -> usize {
+        let index = self.index_raw();
+        debug_assert!(index < 3);
+        index
+    }
+}
+
+/// A meaningless mark used as a default generic parameter for XArray
+/// when marking functionality is not needed.
+#[derive(Clone, Copy)]
+pub struct NoneMark {}
+
+impl ValidMark for NoneMark {
+    fn index_raw(&self) -> usize {
+        0
+    }
+}
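
A sketch of what a user-defined mark type looks like, mirroring the `MarkDemo` enum in src/test.rs; the `ValidMark` trait is re-stated locally so the snippet is self-contained, and the `PageMark` variants are illustrative names.

// Re-statement of the ValidMark trait from src/mark.rs so this sketch compiles on its own.
trait ValidMark: Copy + Clone {
    fn index_raw(&self) -> usize;
    fn index(&self) -> usize {
        let index = self.index_raw();
        debug_assert!(index < 3);
        index
    }
}

// A hypothetical user-defined mark type; each variant maps to one of the three per-node mark bitmaps.
#[derive(Clone, Copy)]
enum PageMark {
    Dirty,
    UpToDate,
    Locked,
}

impl ValidMark for PageMark {
    fn index_raw(&self) -> usize {
        match self {
            Self::Dirty => 0,
            Self::UpToDate => 1,
            Self::Locked => 2,
        }
    }
}

fn main() {
    assert_eq!(PageMark::Dirty.index(), 0);
    assert_eq!(PageMark::Locked.index(), 2);
}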

+ 169 - 32
src/node.rs

@@ -1,21 +1,83 @@
-use std::{marker::PhantomData, sync::Mutex};
+use core::cmp::Ordering;
+use std::{
+    marker::PhantomData,
+    ops::{Deref, DerefMut},
+    sync::{Arc, Mutex, Weak},
+};
 
 use crate::*;
 
 pub(crate) struct ReadOnly {}
 pub(crate) struct ReadWrite {}
 
+/// The layer of an XNode within an XArray.
+///
+/// In an XArray, the head has the highest layer, while the XNodes that directly store items are at the lowest layer,
+/// with a layer value of 0. Each level up from the bottom layer increases the layer number by 1.
+/// The layer of an XArray is the layer of its head.
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]
+pub(crate) struct Layer {
+    layer: u8,
+}
+
+impl Deref for Layer {
+    type Target = u8;
+
+    fn deref(&self) -> &Self::Target {
+        &self.layer
+    }
+}
+
+impl DerefMut for Layer {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.layer
+    }
+}
+
+impl PartialEq<u8> for Layer {
+    fn eq(&self, other: &u8) -> bool {
+        self.layer == *other
+    }
+}
+
+impl PartialOrd<u8> for Layer {
+    fn partial_cmp(&self, other: &u8) -> Option<Ordering> {
+        self.layer.partial_cmp(other)
+    }
+}
+
+impl Layer {
+    pub(crate) fn new(layer: u8) -> Self {
+        Self { layer }
+    }
+
+    fn layer_shift(&self) -> u8 {
+        self.layer * BITS_PER_LAYER as u8
+    }
+
+    /// Calculate the corresponding offset for the target index at the current layer.
+    pub(crate) fn layer_offset(&self, index: u64) -> u8 {
+        ((index >> self.layer_shift()) & SLOT_MASK as u64) as u8
+    }
+
+    /// Calculate the maximum index that can be represented in XArray at the current layer.
+    pub(crate) fn max_index(&self) -> u64 {
+        ((SLOT_SIZE as u64) << self.layer_shift()) - 1
+    }
+}
+
 /// `XNode` is the intermediate node in the tree-like structure of XArray.
 ///
 /// It contains `SLOT_SIZE` number of XEntries, meaning it can accommodate up to `SLOT_SIZE` child nodes.
-/// The 'height' and 'offset' attributes of an XNode are determined at initialization and remain unchanged thereafter.
+/// The 'layer' and 'offset_in_parent' attributes of an XNode are determined at initialization and remain unchanged thereafter.
+///
 /// XNode has a generic parameter called 'Operation', which has two possible instances: `ReadOnly` and `ReadWrite`.
 /// These instances indicate whether the XNode will only perform read operations or both read and write operations
 /// (where write operations imply potential modifications to the contents of slots).
 pub(crate) struct XNode<I: ItemEntry, Operation = ReadOnly> {
-    /// The node's height from the bottom of the tree. The height of a lead node,
-    /// which stores the user-given items, is zero.
-    height: u8,
+    /// The node's layer from the bottom of the tree. The layer of a leaf node,
+    /// which stores the user-given items, is 0.
+    layer: Layer,
     /// This node is its parent's `offset_in_parent`-th child.
     /// This field is meaningless if this node is the root (will be 0).
     offset_in_parent: u8,
@@ -24,93 +86,168 @@ pub(crate) struct XNode<I: ItemEntry, Operation = ReadOnly> {
 }
 
 pub(crate) struct XNodeInner<I: ItemEntry> {
+    parent: Option<Weak<XNode<I, ReadWrite>>>,
     slots: [XEntry<I>; SLOT_SIZE],
+    marks: [Mark; 3],
 }
 
 impl<I: ItemEntry, Operation> XNode<I, Operation> {
-    pub(crate) fn new(height: u8, offset: u8) -> Self {
+    pub(crate) fn new(layer: Layer, offset: u8, parent: Option<Weak<XNode<I, ReadWrite>>>) -> Self {
         Self {
-            height,
+            layer,
             offset_in_parent: offset,
-            inner: Mutex::new(XNodeInner::new()),
+            inner: Mutex::new(XNodeInner::new(parent)),
             _marker: PhantomData,
         }
     }
 
     /// Get the offset in the slots of the current XNode corresponding to the XEntry for the target index.
-    pub(crate) const fn entry_offset(&self, target_index: u64) -> u8 {
-        ((target_index >> self.height as u64) & SLOT_MASK as u64) as u8
-    }
-
-    /// Get the max index the XNode and its child nodes can store.
-    pub(crate) fn max_index(&self) -> u64 {
-        ((SLOT_SIZE as u64) << (self.height as u64)) - 1
+    pub(crate) fn entry_offset(&self, target_index: u64) -> u8 {
+        self.layer.layer_offset(target_index)
     }
 
-    pub(crate) fn height(&self) -> u8 {
-        self.height
+    pub(crate) fn layer(&self) -> Layer {
+        self.layer
     }
 
     pub(crate) fn offset_in_parent(&self) -> u8 {
         self.offset_in_parent
     }
+
+    pub(crate) fn is_marked(&self, offset: u8, mark: usize) -> bool {
+        self.inner.lock().unwrap().is_marked(offset, mark)
+    }
+
+    pub(crate) fn is_mark_clear(&self, mark: usize) -> bool {
+        self.inner.lock().unwrap().is_mark_clear(mark)
+    }
+
+    pub(crate) fn mark(&self, mark: usize) -> Mark {
+        self.inner.lock().unwrap().marks[mark]
+    }
 }
 
 impl<I: ItemEntry> XNode<I, ReadOnly> {
-    pub(crate) fn entry<'a>(&'a self, offset: u8) -> RefEntry<'a, I> {
+    pub(crate) fn parent(&self) -> Option<&XNode<I, ReadOnly>> {
+        self.inner
+            .lock()
+            .unwrap()
+            .parent
+            .as_ref()
+            .map(|parent| unsafe { &*(parent.as_ptr() as *const XNode<I, ReadOnly>) })
+    }
+
+    pub(crate) fn entry<'a>(&'a self, offset: u8) -> *const XEntry<I> {
         let lock = self.inner.lock().unwrap();
         let entry = lock.entry(offset);
-        RefEntry::new(entry)
+        entry
     }
 }
 
 impl<I: ItemEntry> XNode<I, ReadWrite> {
-    pub(crate) fn entry<'a>(&'a self, offset: u8) -> RefEntry<'a, I> {
+    pub(crate) fn parent(&self) -> Option<&XNode<I, ReadWrite>> {
+        self.inner
+            .lock()
+            .unwrap()
+            .parent
+            .as_ref()
+            .map(|parent| unsafe { &*(parent.as_ptr()) })
+    }
+
+    pub(crate) fn entry<'a>(&'a self, offset: u8) -> *const XEntry<I> {
         let mut lock = self.inner.lock().unwrap();
         let entry = lock.entry_mut(offset);
-        RefEntry::new(entry)
+        entry
+    }
+
+    pub(crate) fn set_parent(&self, parent: &XNode<I, ReadWrite>) {
+        let parent = {
+            let arc = unsafe { Arc::from_raw(parent as *const XNode<I, ReadWrite>) };
+            let weak = Arc::downgrade(&arc);
+            core::mem::forget(arc);
+            weak
+        };
+        self.inner.lock().unwrap().parent = Some(parent);
     }
 
     pub(crate) fn set_entry(&self, offset: u8, entry: XEntry<I>) -> XEntry<I> {
         self.inner.lock().unwrap().set_entry(offset, entry)
     }
+
+    pub(crate) fn set_mark(&self, offset: u8, mark: usize) {
+        self.inner.lock().unwrap().set_mark(offset, mark)
+    }
+
+    pub(crate) fn unset_mark(&self, offset: u8, mark: usize) {
+        self.inner.lock().unwrap().unset_mark(offset, mark)
+    }
+
+    pub(crate) fn clear_mark(&self, mark: usize) {
+        self.inner.lock().unwrap().clear_mark(mark)
+    }
 }
 
 impl<I: ItemEntry> XNodeInner<I> {
-    pub(crate) fn new() -> Self {
+    pub(crate) fn new(parent: Option<Weak<XNode<I, ReadWrite>>>) -> Self {
         Self {
+            parent,
             slots: [XEntry::EMPTY; SLOT_SIZE],
+            marks: [Mark::EMPTY; 3],
         }
     }
 
-    pub(crate) fn entry(&self, offset: u8) -> &XEntry<I> {
-        &self.slots[offset as usize]
+    pub(crate) fn entry(&self, offset: u8) -> *const XEntry<I> {
+        &self.slots[offset as usize] as *const XEntry<I>
     }
 
-    pub(crate) fn entry_mut(&mut self, offset: u8) -> &XEntry<I> {
+    pub(crate) fn entry_mut(&mut self, offset: u8) -> *const XEntry<I> {
         // When a modification to the target entry is needed, it first checks whether the entry is shared with other XArrays.
-        // If it is, then it performs a copy-on-write by allocating a new entry and using it,
+        // If it is, then it performs COW by allocating a new entry and using it,
         // to prevent the modification from affecting the read or write operations on other XArrays.
-        self.copy_on_write(
-            unsafe { &*(&self.slots[offset as usize] as *const XEntry<I>) },
-            offset,
-        )
+        if let Some(new_entry) = self.copy_if_shared(&self.slots[offset as usize]) {
+            self.set_entry(offset, new_entry);
+        }
+        &self.slots[offset as usize] as *const XEntry<I>
     }
 
     pub(crate) fn set_entry(&mut self, offset: u8, entry: XEntry<I>) -> XEntry<I> {
         let old_entry = core::mem::replace(&mut self.slots[offset as usize], entry);
         old_entry
     }
+
+    pub(crate) fn set_mark(&mut self, offset: u8, mark: usize) {
+        self.marks[mark].set(offset);
+    }
+
+    pub(crate) fn unset_mark(&mut self, offset: u8, mark: usize) {
+        self.marks[mark].unset(offset);
+    }
+
+    pub(crate) fn is_marked(&self, offset: u8, mark: usize) -> bool {
+        self.marks[mark].is_marked(offset)
+    }
+
+    pub(crate) fn is_mark_clear(&self, mark: usize) -> bool {
+        self.marks[mark].is_clear()
+    }
+
+    pub(crate) fn clear_mark(&mut self, mark: usize) {
+        self.marks[mark].clear();
+    }
 }
 
 pub(crate) fn deep_clone_node_entry<I: ItemEntry + Clone>(entry: &XEntry<I>) -> XEntry<I> {
     debug_assert!(entry.is_node());
     let new_node = {
         let cloned_node: &XNode<I> = entry.as_node().unwrap();
-        let new_node =
-            XNode::<I, ReadWrite>::new(cloned_node.height(), cloned_node.offset_in_parent());
+        let new_node = XNode::<I, ReadWrite>::new(
+            cloned_node.layer(),
+            cloned_node.offset_in_parent(),
+            cloned_node.inner.lock().unwrap().parent.clone(),
+        );
         let mut new_node_lock = new_node.inner.lock().unwrap();
         let cloned_node_lock = cloned_node.inner.lock().unwrap();
+        new_node_lock.marks = cloned_node_lock.marks;
         for i in 0..SLOT_SIZE {
             let entry = &cloned_node_lock.slots[i];
             let new_entry = entry.clone();

+ 111 - 10
src/test.rs

@@ -1,4 +1,6 @@
+#[cfg(test)]
 use super::*;
+#[cfg(test)]
 use std::sync::Arc;
 
 #[test]
@@ -28,27 +30,81 @@ fn test_remove() {
     }
 }
 
+#[test]
+fn test_mark() {
+    #[derive(Clone, Copy)]
+    enum MarkDemo {
+        Mark0,
+        Mark1,
+        Mark2,
+    }
+
+    impl ValidMark for MarkDemo {
+        fn index_raw(&self) -> usize {
+            match self {
+                Self::Mark0 => 0,
+                Self::Mark1 => 1,
+                Self::Mark2 => 2,
+            }
+        }
+    }
+
+    let mut xarray_arc: XArray<Arc<i32>, MarkDemo> = XArray::new();
+    for i in 1..10000 {
+        let value = Arc::new(i * 2);
+        xarray_arc.store(i as u64, value);
+    }
+    xarray_arc.set_mark(1000, MarkDemo::Mark0).unwrap();
+    xarray_arc.set_mark(1000, MarkDemo::Mark1).unwrap();
+    xarray_arc.set_mark(2000, MarkDemo::Mark1).unwrap();
+    let (value1, value1_mark0) = xarray_arc.load_with_mark(1000, MarkDemo::Mark0).unwrap();
+    let (_, value1_mark1) = xarray_arc.load_with_mark(1000, MarkDemo::Mark1).unwrap();
+    let (value2, value2_mark1) = xarray_arc.load_with_mark(2000, MarkDemo::Mark1).unwrap();
+    let (_, value2_mark0) = xarray_arc.load_with_mark(2000, MarkDemo::Mark0).unwrap();
+    let (value3, value3_mark1) = xarray_arc.load_with_mark(3000, MarkDemo::Mark1).unwrap();
+    assert!(*value1.as_ref() == 2000);
+    assert!(*value2.as_ref() == 4000);
+    assert!(*value3.as_ref() == 6000);
+    assert!(value1_mark0 == true);
+    assert!(value1_mark1 == true);
+    assert!(value2_mark0 == false);
+    assert!(value2_mark1 == true);
+    assert!(value3_mark1 == false);
+    assert!(Err(()) == xarray_arc.set_mark(20000, MarkDemo::Mark1));
+
+    xarray_arc.unset_mark(1000, MarkDemo::Mark0).unwrap();
+    xarray_arc.unset_mark(1000, MarkDemo::Mark2).unwrap();
+    let (_, value1_mark0) = xarray_arc.load_with_mark(1000, MarkDemo::Mark0).unwrap();
+    let (_, value1_mark2) = xarray_arc.load_with_mark(1000, MarkDemo::Mark2).unwrap();
+    assert!(value1_mark0 == false);
+    assert!(value1_mark2 == false);
+    assert!(Err(()) == xarray_arc.unset_mark(20000, MarkDemo::Mark1));
+
+    xarray_arc.unset_mark_all(MarkDemo::Mark1);
+    let (_, value2_mark1) = xarray_arc.load_with_mark(2000, MarkDemo::Mark1).unwrap();
+    assert!(value2_mark1 == false);
+}
+
 #[test]
 fn test_cow() {
-    static mut INIT_COUNT: usize = 0;
-    static mut DROP_COUNT: usize = 0;
+    use std::sync::atomic::AtomicU64;
+    use std::sync::atomic::Ordering;
+
+    static INIT_TIMES: AtomicU64 = AtomicU64::new(0);
+    static DROP_TIMES: AtomicU64 = AtomicU64::new(0);
     struct Wrapper {
         raw: usize,
     }
 
     impl Drop for Wrapper {
         fn drop(&mut self) {
-            unsafe {
-                DROP_COUNT += 1;
-            }
+            DROP_TIMES.fetch_add(1, Ordering::Relaxed);
         }
     }
 
     impl Wrapper {
         fn new(raw: usize) -> Self {
-            unsafe {
-                INIT_COUNT += 1;
-            }
+            INIT_TIMES.fetch_add(1, Ordering::Relaxed);
             Self { raw }
         }
     }
@@ -82,7 +138,52 @@ fn test_cow() {
     }
     drop(xarray_arc);
     drop(xarray_clone);
-    unsafe {
-        assert!(INIT_COUNT == DROP_COUNT);
+    assert!(INIT_TIMES.load(Ordering::Relaxed) == DROP_TIMES.load(Ordering::Relaxed));
+}
+
+#[test]
+fn test_cow_mark() {
+    #[derive(Clone, Copy)]
+    enum MarkDemo {
+        Mark0,
+        Mark1,
+    }
+
+    impl ValidMark for MarkDemo {
+        fn index_raw(&self) -> usize {
+            match self {
+                Self::Mark0 => 0,
+                Self::Mark1 => 1,
+            }
+        }
     }
+
+    let mut xarray_arc: XArray<Arc<i32>, MarkDemo> = XArray::new();
+    for i in 1..10000 {
+        let value = Arc::new(i * 2);
+        xarray_arc.store(i as u64, value);
+    }
+    let mut xarray_clone = xarray_arc.clone();
+    xarray_arc.set_mark(1000, MarkDemo::Mark0).unwrap();
+    xarray_arc.set_mark(2000, MarkDemo::Mark0).unwrap();
+    xarray_clone.set_mark(1000, MarkDemo::Mark1).unwrap();
+    xarray_arc.set_mark(3000, MarkDemo::Mark0).unwrap();
+
+    let (_, mark0_1000_arc) = xarray_arc.load_with_mark(1000, MarkDemo::Mark0).unwrap();
+    let (_, mark0_2000_arc) = xarray_arc.load_with_mark(2000, MarkDemo::Mark0).unwrap();
+    let (_, mark1_1000_arc) = xarray_arc.load_with_mark(1000, MarkDemo::Mark1).unwrap();
+    let (_, mark0_1000_clone) = xarray_clone.load_with_mark(1000, MarkDemo::Mark0).unwrap();
+    let (_, mark0_2000_clone) = xarray_clone.load_with_mark(2000, MarkDemo::Mark0).unwrap();
+    let (_, mark1_1000_clone) = xarray_clone.load_with_mark(1000, MarkDemo::Mark1).unwrap();
+    let (_, mark0_3000_arc) = xarray_arc.load_with_mark(3000, MarkDemo::Mark0).unwrap();
+    let (_, mark0_3000_clone) = xarray_clone.load_with_mark(3000, MarkDemo::Mark0).unwrap();
+
+    assert!(mark0_1000_arc == true);
+    assert!(mark0_2000_arc == true);
+    assert!(mark1_1000_arc == false);
+    assert!(mark0_1000_clone == false);
+    assert!(mark0_2000_clone == false);
+    assert!(mark1_1000_clone == true);
+    assert!(mark0_3000_arc == true);
+    assert!(mark0_3000_clone == false);
 }

+ 105 - 13
src/xarray.rs

@@ -1,9 +1,9 @@
-use std::marker::PhantomData;
+use std::{collections::VecDeque, marker::PhantomData};
 
 use crate::*;
 
-pub(crate) const NODE_HEIGHT: usize = 6;
-pub(crate) const SLOT_SIZE: usize = 1 << NODE_HEIGHT;
+pub(crate) const BITS_PER_LAYER: usize = 6;
+pub(crate) const SLOT_SIZE: usize = 1 << BITS_PER_LAYER;
 pub(crate) const SLOT_MASK: usize = SLOT_SIZE - 1;
 
 /// The XArray is an abstract data type which behaves like a very large array of items.
@@ -17,9 +17,13 @@ pub(crate) const SLOT_MASK: usize = SLOT_SIZE - 1;
 /// can achieve Clone with a COW mechanism. When cloning an XArray, initially the new XArray shares a head with the original XArray
 /// without performing an actual clone. If either of the XArrays needs to perform a mutable operation, a substantive clone of the XNode to be modified is created before making the update.
 /// This ensures that operations on the two XArrays do not affect each other.
-/// **Reference.** All operations on XArray are performed through `Cursor` and `CursorMut`. 
-/// Cursor requires an immutable reference to XArray, while CursorMut requires a mutable reference. 
+/// **Reference.** All operations on XArray are performed through `Cursor` and `CursorMut`.
+/// Cursor requires an immutable reference to XArray, while CursorMut requires a mutable reference.
 /// Therefore, XArray can have multiple Cursors operating at the same time, whereas the operations of CursorMut are exclusive (similar to the relationship between & and &mut).
+/// **Mark.** `XArray` supports the ability to add marks to any stored item to assist users.
+/// By default, an item can be marked with up to three different marks, with each mark being independent of the others.
+/// Marks for an item are typically enumerations that must implement the ValidMark trait.
+/// Internal nodes can also be marked. When an intermediate node is marked, it signifies that it has child nodes that have been marked.
 ///
 /// # Example
 ///
@@ -43,15 +47,16 @@ pub(crate) const SLOT_MASK: usize = SLOT_SIZE - 1;
 ///
 /// The concepts XArray are originally introduced by Linux, which keeps the data structure of Linux's radix tree
 /// [Linux Radix Trees](https://lwn.net/Articles/175432/).
-pub struct XArray<I>
+pub struct XArray<I, M = NoneMark>
 where
     I: ItemEntry,
+    M: ValidMark,
 {
     head: XEntry<I>,
-    _marker: PhantomData<I>,
+    _marker: PhantomData<(I, M)>,
 }
 
-impl<I: ItemEntry> XArray<I> {
+impl<I: ItemEntry, M: ValidMark> XArray<I, M> {
     /// Make a new, empty XArray.
     pub const fn new() -> Self {
         Self {
@@ -68,9 +73,20 @@ impl<I: ItemEntry> XArray<I> {
     /// Return a reference to the head entry, and later will modify the XNode pointed to by the `head`.
     pub(crate) fn head_mut(&mut self) -> &XEntry<I> {
         // When a modification to the head is needed, it first checks whether the head is shared with other XArrays.
-        // If it is, then it performs a copy-on-write by allocating a new head and using it,
+        // If it is, then it performs COW by allocating a new head and using it,
         // to prevent the modification from affecting the read or write operations on other XArrays.
-        self.copy_on_write(unsafe { &*(&self.head as *const XEntry<I>) }, 0)
+        if let Some(new_head) = self.copy_if_shared(&self.head) {
+            self.set_head(new_head);
+        }
+        &self.head
+    }
+
+    pub(crate) fn max_index(&self) -> u64 {
+        if let Some(node) = self.head.as_node() {
+            node.layer().max_index()
+        } else {
+            0
+        }
     }
 
     /// Set the head of the `XArray` with the new `XEntry`, and return the old `head`.
@@ -99,6 +115,82 @@ impl<I: ItemEntry> XArray<I> {
         XEntry::into_item(old_entry)
     }
 
+    /// Attempt to load the item at the target index within the `XArray`, together with whether it is marked with the input `mark`.
+    /// If the target item exists, return it with `Some`; otherwise, return `None`.
+    pub fn load_with_mark(&self, index: u64, mark: M) -> Option<(&I, bool)> {
+        let mut cursor = self.cursor(index);
+        let entry = cursor.load();
+        let mark = if entry.is_some() {
+            cursor.is_marked(mark)
+        } else {
+            None
+        };
+        if entry.is_some_and(|entry| entry.is_item()) {
+            entry.map(|entry| {
+                (
+                    unsafe { &*(entry as *const XEntry<I> as *const I) },
+                    mark.unwrap(),
+                )
+            })
+        } else {
+            None
+        }
+    }
+
+    /// Store the provided item in the `XArray` at the target index and mark it with the input `mark`,
+    /// returning the old item if one was previously stored at the target index.
+    pub fn store_with_mark(&mut self, index: u64, item: I, mark: M) -> Option<I> {
+        let stored_entry = XEntry::from_item(item);
+        let mut cursor = self.cursor_mut(index);
+        let old_entry = cursor.store(stored_entry);
+        cursor.set_mark(mark).unwrap();
+        XEntry::into_item(old_entry)
+    }
+
+    /// Mark the item at the target index in the `XArray` with the input `mark`.
+    /// If the item does not exist, return an Error.
+    pub fn set_mark(&mut self, index: u64, mark: M) -> Result<(), ()> {
+        self.cursor_mut(index).set_mark(mark)
+    }
+
+    /// Unset the input `mark` for the item at the target index in the `XArray`.
+    /// If the item does not exist, return an Error.
+    pub fn unset_mark(&mut self, index: u64, mark: M) -> Result<(), ()> {
+        self.cursor_mut(index).unset_mark(mark)
+    }
+
+    /// Obtain a reference to the XEntry from a pointer pointing to it.
+    ///
+    /// # Safety
+    /// The user must ensure that the pointer remains valid for the duration of use of the target XEntry reference.
+    pub(crate) unsafe fn ref_entry(&self, entry_ptr: *const XEntry<I>) -> &XEntry<I> {
+        &*entry_ptr
+    }
+
+    /// Unset the input `mark` for all of the items in the `XArray`.
+    pub fn unset_mark_all(&mut self, mark: M) {
+        let mut handle_list = VecDeque::new();
+        if let Some(node) = self.head.as_node_mut() {
+            handle_list.push_back(node);
+        }
+        while !handle_list.is_empty() {
+            let node = handle_list.pop_front().unwrap();
+            let mut offset = 0;
+            let node_mark = node.mark(mark.index());
+            while (offset as usize) < SLOT_SIZE {
+                if node_mark.is_marked(offset) {
+                    // Safety: During this operation, the used XNode will not be removed and the referenced XEntry must be valid.
+                    let entry = unsafe { self.ref_entry(node.entry(offset)) };
+                    if let Some(node) = entry.as_node_mut() {
+                        handle_list.push_back(node);
+                    }
+                }
+                offset += 1;
+            }
+            node.clear_mark(mark.index());
+        }
+    }
+
     /// Removes the `XEntry` at the target index within the `XArray`,
     /// and return the removed item if it was previously stored in target index.
     pub fn remove(&mut self, index: u64) -> Option<I> {
@@ -107,17 +199,17 @@ impl<I: ItemEntry> XArray<I> {
     }
 
     /// Create an `Cursor` to perform read related operations on the `XArray`.
-    fn cursor<'a>(&'a self, index: u64) -> Cursor<'a, I> {
+    pub(crate) fn cursor<'a>(&'a self, index: u64) -> Cursor<'a, I, M> {
         Cursor::new(self, index)
     }
 
     /// Create an `CursorMut` to perform read and write operations on the `XArray`.
-    fn cursor_mut<'a>(&'a mut self, index: u64) -> CursorMut<'a, I> {
+    pub(crate) fn cursor_mut<'a>(&'a mut self, index: u64) -> CursorMut<'a, I, M> {
         CursorMut::new(self, index)
     }
 }
 
-impl<I: ItemEntry + Clone> Clone for XArray<I> {
+impl<I: ItemEntry + Clone, M: ValidMark> Clone for XArray<I, M> {
     /// Clone with cow mechanism.
     fn clone(&self) -> Self {
         let cloned_head = self.head.clone();
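
Finally, a usage sketch of the new mark-related public API (`set_mark`, `unset_mark_all`, `load_with_mark`), reusing the shape of the `MarkDemo` enum from src/test.rs; it assumes the crate is imported as `xarray` and that `ValidMark` is re-exported from the crate root.

use std::sync::Arc;
use xarray::{ValidMark, XArray}; // assumed crate path and re-export

#[derive(Clone, Copy)]
enum MarkDemo {
    Mark0,
    Mark1,
}

impl ValidMark for MarkDemo {
    fn index_raw(&self) -> usize {
        match self {
            Self::Mark0 => 0,
            Self::Mark1 => 1,
        }
    }
}

fn main() {
    let mut xa: XArray<Arc<i32>, MarkDemo> = XArray::new();
    xa.store(1000, Arc::new(2000));

    // Marking an existing item succeeds; an index beyond the stored range returns Err(()).
    xa.set_mark(1000, MarkDemo::Mark0).unwrap();
    assert!(xa.set_mark(20000, MarkDemo::Mark0).is_err());

    // load_with_mark returns the item together with its mark bit.
    let (value, marked) = xa.load_with_mark(1000, MarkDemo::Mark0).unwrap();
    assert_eq!(**value, 2000);
    assert!(marked);

    // unset_mark_all clears one kind of mark across the whole array.
    xa.unset_mark_all(MarkDemo::Mark0);
    let (_, marked) = xa.load_with_mark(1000, MarkDemo::Mark0).unwrap();
    assert!(!marked);
}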