From 50e99fd07a277e6cd2e43272c682a63a09cfd43b Mon Sep 17 00:00:00 2001
From: Bert Peters
Date: Sun, 16 May 2021 14:16:51 +0200
Subject: [PATCH] Rework dependency-tracking to be poison-free

New dependency edges that would introduce a cycle are now rejected
outright, leaving the overall graph unaffected. This simplifies the
visible API and also removes the need to restore the graph afterwards.
---
 README.md      |   1 +
 src/graph.rs   | 128 ++++++++++--------------------------------------
 src/lib.rs     |  38 ++++++---------
 src/stdsync.rs |   9 ----
 4 files changed, 42 insertions(+), 134 deletions(-)

diff --git a/README.md b/README.md
index 83e1196..f623a66 100644
--- a/README.md
+++ b/README.md
@@ -54,5 +54,6 @@ available for other synchronization primitives.
 ## Future improvements
 
 - Improve performance in lock tracing
+- Optional logging to make debugging easier
 - Better and configurable error handling when detecting cyclic dependencies
 - Support for other locking libraries, such as `parking_lot`
diff --git a/src/graph.rs b/src/graph.rs
index 3c86a5a..81e8a04 100644
--- a/src/graph.rs
+++ b/src/graph.rs
@@ -14,8 +14,8 @@ type Order = usize;
 /// original paper.
 ///
 /// This digraph tracks its own topological order and updates it when new edges are added to the
-/// graph. After a cycle has been introduced, the order is no longer kept up to date as it doesn't
-/// exist, but new edges are still tracked. Nodes are added implicitly when they're used in edges.
+/// graph. If a new edge would introduce a cycle, that edge is rejected and the graph is not
+/// visibly changed.
 ///
 /// [paper]: https://whileydave.com/publications/pk07_jea/
 #[derive(Clone, Default, Debug)]
@@ -27,9 +27,6 @@ where
     out_edges: HashMap<V, HashSet<V>>,
     /// Next topological sort order
     next_ord: Order,
-    /// Poison flag, set if a cycle is detected when adding a new edge and
-    /// unset when removing a node successfully removed the cycle.
-    contains_cycle: bool,
     /// Topological sort order. Order is not guaranteed to be contiguous
     ord: HashMap<V, Order>,
 }
@@ -71,45 +68,49 @@ where
                     self.out_edges.get_mut(&other).unwrap().remove(&n);
                 }
 
-                if self.contains_cycle {
-                    // Need to build a valid topological order
-                    self.recompute_topological_order();
-                }
-
                 true
             }
         }
     }
 
-    /// Add an edge to the graph
+    /// Attempt to add an edge to the graph
     ///
-    /// Nodes, both from and to, are created as needed when creating new edges.
+    /// Nodes, both from and to, are created as needed when creating new edges. If the new edge
+    /// would introduce a cycle, the edge is rejected and `false` is returned.
     pub(crate) fn add_edge(&mut self, x: V, y: V) -> bool {
         if x == y {
             // self-edges are not considered cycles
-            return false;
+            return true;
         }
 
         let (_, out_edges, ub) = self.add_node(x);
 
         if !out_edges.insert(y) {
             // Edge already exists, nothing to be done
-            return false;
+            return true;
         }
 
         let (in_edges, _, lb) = self.add_node(y);
         in_edges.insert(x);
 
-        if !self.contains_cycle && lb < ub {
+        if lb < ub {
             // This edge might introduce a cycle, need to recompute the topological sort
             let mut visited = HashSet::new();
             let mut delta_f = Vec::new();
             let mut delta_b = Vec::new();
 
             if !self.dfs_f(y, ub, &mut visited, &mut delta_f) {
-                self.contains_cycle = true;
-                return true;
+                // This edge introduces a cycle, so we want to reject it and remove it from the
+                // graph again to keep the "does not contain cycles" invariant.
+
+                // We use `map` here to avoid an `unwrap()`, even though we know these entries
+                // are present because we just added them above.
+                self.in_edges.get_mut(&y).map(|nodes| nodes.remove(&x));
+                self.out_edges.get_mut(&x).map(|nodes| nodes.remove(&y));
+
+                // No edge was added
+                return false;
             }
 
             // No need to check as we should've found the cycle on the forward pass
@@ -187,78 +188,6 @@ where
         // Can use unstable sort because mutex ids should not be equal
         ids.sort_unstable_by_key(|v| self.ord[v]);
     }
-
-    pub fn has_cycles(&self) -> bool {
-        self.contains_cycle
-    }
-
-    /// Attempt to recompute a valid topological order.
-    ///
-    /// This method implements the DFS method to find leave nodes to find the reverse order
-    fn recompute_topological_order(&mut self) {
-        // This function should only be called when the graph contains a cycle.
-        debug_assert!(self.contains_cycle);
-
-        let mut permanent_marks = HashSet::with_capacity(self.out_edges.len());
-        let mut temporary_marks = HashSet::new();
-        let mut rev_order = Vec::with_capacity(self.out_edges.len());
-
-        for node in self.out_edges.keys() {
-            if permanent_marks.contains(node) {
-                continue;
-            }
-
-            if !self.visit(
-                *node,
-                &mut permanent_marks,
-                &mut temporary_marks,
-                &mut rev_order,
-            ) {
-                // Cycle found, no order possible
-                return;
-            }
-        }
-
-        // We didn't find a cycle, so we can reset
-        self.contains_cycle = false;
-        // Newly allocated order is contiguous 0..rev_order.len()
-        self.next_ord = rev_order.len();
-
-        self.ord.clear();
-        self.ord
-            .extend(rev_order.into_iter().rev().enumerate().map(|(k, v)| (v, k)))
-    }
-
-    /// Helper function for `Self::recompute_topological_order`.
-    fn visit(
-        &self,
-        v: V,
-        permanent_marks: &mut HashSet<V>,
-        temporary_marks: &mut HashSet<V>,
-        rev_order: &mut Vec<V>,
-    ) -> bool {
-        if permanent_marks.contains(&v) {
-            return true;
-        }
-
-        if !temporary_marks.insert(v) {
-            return false;
-        }
-
-        if !self.out_edges[&v]
-            .iter()
-            .all(|&w| self.visit(w, permanent_marks, temporary_marks, rev_order))
-        {
-            return false;
-        }
-
-        temporary_marks.remove(&v);
-        permanent_marks.insert(v);
-
-        rev_order.push(v);
-
-        true
-    }
 }
 
 #[cfg(test)]
@@ -270,20 +199,15 @@ mod tests {
         let mut graph = DiGraph::default();
 
         // Add some safe edges
-        graph.add_edge(0, 1);
-        graph.add_edge(1, 2);
-        graph.add_edge(2, 3);
-        graph.add_edge(4, 2);
+        assert!(graph.add_edge(0, 1));
+        assert!(graph.add_edge(1, 2));
+        assert!(graph.add_edge(2, 3));
+        assert!(graph.add_edge(4, 2));
 
-        // Should not have a cycle yet
-        assert!(!graph.has_cycles());
+        // Try to add an edge that introduces a cycle
+        assert!(!graph.add_edge(3, 1));
 
-        // Introduce cycle 3 → 1 → 2 → 3
-        graph.add_edge(3, 1);
-        assert!(graph.has_cycles());
-
-        // Removing 3 should remove that cycle
-        assert!(graph.remove_node(3));
-        assert!(!graph.has_cycles())
+        // Add an edge that should reorder 0 to be after 4
+        assert!(graph.add_edge(4, 0));
     }
 }
diff --git a/src/lib.rs b/src/lib.rs
index e152cd6..10a6cf4 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -10,9 +10,10 @@
 //!
 //! The primary method by which this crate signals an invalid lock acquisition order is by
 //! panicking. When a cycle is created in the dependency graph when acquiring a lock, the thread
-//! will instead panic. This panic will not poison the underlying mutex. Each following acquired
-//! that introduces a **new** dependency will also panic, until enough mutexes are deallocated to
-//! break the cycle in the graph.
+//! will instead panic. This panic will not poison the underlying mutex.
+//!
+//! This conflicting dependency is not added to the graph, so future attempts at locking should
+//! succeed as normal.
 //!
 //! # Structure
 //!
@@ -35,9 +36,8 @@
 //!
 //! - **Allocating a lock** performs an atomic update to a shared counter.
 //!
-//! - **Deallocating a mutex** temporarily locks the global dependency graph to remove the lock from
-//!   it. If the graph contained a cycle, a complete scan of the (now pruned) graph is done to
-//!   determine if this is still the case.
+//! - **Deallocating a mutex** temporarily locks the global dependency graph to remove that lock's
+//!   entry from it.
 //!
 //! These operations have been reasonably optimized, but the performance penalty may yet be too much
 //! for production use. In those cases, it may be beneficial to instead use debug-only versions
@@ -127,7 +127,7 @@ impl MutexId {
         if let Some(&previous) = locks.borrow().last() {
             let mut graph = get_dependency_graph();
 
-            graph.add_edge(previous, self.value()) && graph.has_cycles()
+            !graph.add_edge(previous, self.value())
         } else {
             false
         }
@@ -273,11 +273,6 @@ fn get_dependency_graph() -> impl DerefMut<Target = DiGraph<usize>> {
 mod tests {
     use super::*;
 
-    lazy_static! {
-        /// Mutex to isolate tests manipulating the global mutex graph
-        pub(crate) static ref GRAPH_MUTEX: Mutex<()> = Mutex::new(());
-    }
-
     #[test]
     fn test_next_mutex_id() {
         let initial = MutexId::new();
@@ -289,26 +284,23 @@
 
     #[test]
     fn test_lazy_mutex_id() {
-        let _graph_lock = GRAPH_MUTEX.lock();
-
         let a = LazyMutexId::new();
         let b = LazyMutexId::new();
         let c = LazyMutexId::new();
 
         let mut graph = get_dependency_graph();
-        graph.add_edge(a.value(), b.value());
-        graph.add_edge(b.value(), c.value());
+        assert!(graph.add_edge(a.value(), b.value()));
+        assert!(graph.add_edge(b.value(), c.value()));
 
-        assert!(!graph.has_cycles());
-
-        graph.add_edge(c.value(), a.value());
-        assert!(graph.has_cycles());
+        // Creating an edge c → a should fail as it introduces a cycle.
+        assert!(!graph.add_edge(c.value(), a.value()));
 
         // Drop graph handle so we can drop vertices without deadlocking
         drop(graph);
-        drop(c);
 
-        // If c's destructor correctly ran the graph shouldn't contain cycles anymore
-        assert!(!get_dependency_graph().has_cycles());
+        drop(b);
+
+        // If b's destructor ran correctly we can now add an edge from c to a.
+        assert!(get_dependency_graph().add_edge(c.value(), a.value()));
     }
 }
diff --git a/src/stdsync.rs b/src/stdsync.rs
index 53af4e0..974b00d 100644
--- a/src/stdsync.rs
+++ b/src/stdsync.rs
@@ -380,12 +380,9 @@ mod tests {
     use std::thread;
 
     use super::*;
-    use crate::tests::GRAPH_MUTEX;
 
     #[test]
     fn test_mutex_usage() {
-        let _graph_lock = GRAPH_MUTEX.lock();
-
         let mutex = Arc::new(TracingMutex::new(()));
         let mutex_clone = mutex.clone();
 
@@ -403,8 +400,6 @@
 
     #[test]
     fn test_rwlock_usage() {
-        let _graph_lock = GRAPH_MUTEX.lock();
-
         let rwlock = Arc::new(TracingRwLock::new(()));
         let rwlock_clone = rwlock.clone();
 
@@ -425,8 +420,6 @@
 
     #[test]
     fn test_once_usage() {
-        let _graph_lock = GRAPH_MUTEX.lock();
-
         let once = Arc::new(TracingOnce::new());
         let once_clone = once.clone();
 
@@ -448,8 +441,6 @@
 
     #[test]
     #[should_panic(expected = "Mutex order graph should not have cycles")]
     fn test_detect_cycle() {
-        let _graph_lock = GRAPH_MUTEX.lock();
-
         let a = TracingMutex::new(());
         let b = TracingMutex::new(());
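
To make the reworked `add_edge` contract easier to review, here is a minimal sketch of the new behaviour, written as it could appear as an extra unit test inside `mod tests` in `src/graph.rs` (where `use super::*;` brings the `pub(crate)` `DiGraph` into scope). The test name and the integer node values are illustrative only and are not part of this patch:

```rust
#[test]
fn add_edge_rejects_cycles_without_poisoning() {
    let mut graph = DiGraph::default();

    // Successful insertions return true and keep the topological order valid.
    assert!(graph.add_edge(1, 2));
    assert!(graph.add_edge(2, 3));

    // An edge that would close the cycle 1 → 2 → 3 → 1 is rejected: it returns
    // false and leaves the graph exactly as it was before the call.
    assert!(!graph.add_edge(3, 1));

    // Because the graph is never poisoned, unrelated edges can be added right
    // away without any recovery step.
    assert!(graph.add_edge(3, 4));

    // Removing a node also removes its edges, so the previously rejected
    // ordering becomes legal again.
    assert!(graph.remove_node(2));
    assert!(graph.add_edge(3, 1));
}
```

The third assertion captures the behavioural change: a rejected edge leaves the graph untouched, so there is no poison flag to clear and no topological order to rebuild before the graph can be used again.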