From 61b45a911d3756cbd216a673ddfc9e50e0c214e6 Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Wed, 11 Oct 2023 11:30:25 +1100
Subject: [PATCH 01/16] Remove code comment

Remove unnecessary code comment; the local variable names are clear
enough.
---
 src/policy/semantic.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index dc84a9439..d6c9e61d3 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -407,7 +407,7 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
                 let n = subs.len() - unsatisfied_count - trivial_count; // remove all true/false
                 let m = k.checked_sub(trivial_count).map_or(0, |x| x); // satisfy all trivial
 
-                // m == n denotes `and` and m == 1 denotes `or`
+
                 let is_and = m == n;
                 let is_or = m == 1;
                 for sub in subs {

From 10e59821a79a7c8a664bf40983b585757adf44d6 Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Fri, 13 Oct 2023 10:40:12 +1100
Subject: [PATCH 02/16] Add line of whitespace

This function is fiendishly hard to understand; add a line of
whitespace as a tiny baby step towards making it clearer.
---
 src/policy/semantic.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index d6c9e61d3..be5319a4d 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -410,6 +410,7 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
 
                 let is_and = m == n;
                 let is_or = m == 1;
+
                 for sub in subs {
                     match sub {
                         Policy::Trivial | Policy::Unsatisfiable => {}

From 721e16bc0336e2cb769a9c0264713c322c9804ae Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Wed, 11 Oct 2023 11:32:57 +1100
Subject: [PATCH 03/16] Use unwrap_or

Currently we are using `map_or` in a convoluted manner; we can just use
`unwrap_or` to get the inner value or default to 0.

Refactor only, no logic changes.
---
 src/policy/semantic.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index be5319a4d..e15585552 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -406,7 +406,7 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
                     .count();
 
                 let n = subs.len() - unsatisfied_count - trivial_count; // remove all true/false
-                let m = k.checked_sub(trivial_count).map_or(0, |x| x); // satisfy all trivial
+                let m = k.checked_sub(trivial_count).unwrap_or(0); // satisfy all trivial
 
                 let is_and = m == n;
                 let is_or = m == 1;

From 2fd42d667b7e6900b44dee2f4555ca884fba6edf Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Wed, 11 Oct 2023 11:53:55 +1100
Subject: [PATCH 04/16] Improve spacing in error string

We have a little typo in an error string, fix it up.
---
 src/policy/semantic.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index e15585552..0b4a87f2b 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -372,7 +372,7 @@ impl_from_tree!(
             // thresh(1) and thresh(n) are disallowed in semantic policies
             if thresh <= 1 || thresh >= (nsubs as u32 - 1) {
                 return Err(errstr(
-                    "Semantic Policy thresh cannot have k = 1 or k =n, use `and`/`or` instead",
+                    "Semantic Policy thresh cannot have k = 1 or k = n, use `and`/`or` instead",
                 ));
             }
             if thresh >= (nsubs as u32) {

From 161753c41d69ddc594a45b9ae32decb1847e2ca6 Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Wed, 11 Oct 2023 14:34:08 +1100
Subject: [PATCH 05/16] Fix imports in test module

Use the standard form for import statements.

(Note `Arc` is already in scope in `super`.)
--- src/policy/mod.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/policy/mod.rs b/src/policy/mod.rs index 138ec45b7..edb68ec9a 100644 --- a/src/policy/mod.rs +++ b/src/policy/mod.rs @@ -223,14 +223,12 @@ mod tests { use core::str::FromStr; use bitcoin::Sequence; - #[cfg(feature = "compiler")] - use sync::Arc; - use super::super::miniscript::context::Segwitv0; - use super::super::miniscript::Miniscript; - use super::{Concrete, Liftable, Semantic}; + use super::*; #[cfg(feature = "compiler")] use crate::descriptor::Tr; + use crate::miniscript::context::Segwitv0; + use crate::miniscript::Miniscript; use crate::prelude::*; #[cfg(feature = "compiler")] use crate::{descriptor::TapTree, Descriptor, Tap}; From 0ef5e54f8ae0110dd67b78d68840149cacba9090 Mon Sep 17 00:00:00 2001 From: "Tobin C. Harding" Date: Tue, 10 Oct 2023 05:35:27 +1100 Subject: [PATCH 06/16] Move TreeLike impl to concrete module When we implemented the `TreeLike` trait for the concrete `Policy` we put it in the `iter` module, it doesn't really live there, better to put the impl in the same place as the definition of the implementor. Move the impl of `TreeLike` for `concrete::Policy` to the `concrete` module. --- src/iter/mod.rs | 28 +--------------------------- src/policy/concrete.rs | 30 +++++++++++++++++++++++++++++- 2 files changed, 30 insertions(+), 28 deletions(-) diff --git a/src/iter/mod.rs b/src/iter/mod.rs index 553ae0f47..02e59b6e2 100644 --- a/src/iter/mod.rs +++ b/src/iter/mod.rs @@ -15,7 +15,7 @@ pub use tree::{ }; use crate::sync::Arc; -use crate::{policy, Miniscript, MiniscriptKey, ScriptContext, Terminal}; +use crate::{Miniscript, MiniscriptKey, ScriptContext, Terminal}; impl<'a, Pk: MiniscriptKey, Ctx: ScriptContext> TreeLike for &'a Miniscript { fn as_node(&self) -> Tree { @@ -68,29 +68,3 @@ impl TreeLike for Arc } } } - -impl<'a, Pk: MiniscriptKey> TreeLike for &'a policy::Concrete { - fn as_node(&self) -> Tree { - use policy::Concrete::*; - match *self { - Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_) - | Ripemd160(_) | Hash160(_) => Tree::Nullary, - And(ref subs) => Tree::Nary(subs.iter().map(Arc::as_ref).collect()), - Or(ref v) => Tree::Nary(v.iter().map(|(_, p)| p.as_ref()).collect()), - Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::as_ref).collect()), - } - } -} - -impl TreeLike for Arc> { - fn as_node(&self) -> Tree { - use policy::Concrete::*; - match self.as_ref() { - Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_) - | Ripemd160(_) | Hash160(_) => Tree::Nullary, - And(ref subs) => Tree::Nary(subs.iter().map(Arc::clone).collect()), - Or(ref v) => Tree::Nary(v.iter().map(|(_, p)| Arc::clone(p)).collect()), - Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::clone).collect()), - } - } -} diff --git a/src/policy/concrete.rs b/src/policy/concrete.rs index 19e7771b9..63af68331 100644 --- a/src/policy/concrete.rs +++ b/src/policy/concrete.rs @@ -23,7 +23,7 @@ use { use super::ENTAILMENT_MAX_TERMINALS; use crate::expression::{self, FromTree}; -use crate::iter::TreeLike; +use crate::iter::{Tree, TreeLike}; use crate::miniscript::types::extra_props::TimelockInfo; use crate::prelude::*; use crate::sync::Arc; @@ -1114,6 +1114,34 @@ fn generate_combination( ret } +impl<'a, Pk: MiniscriptKey> TreeLike for &'a Policy { + fn as_node(&self) -> Tree { + use Policy::*; + + match *self { + Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_) + | Ripemd160(_) | Hash160(_) => 
Tree::Nullary, + And(ref subs) => Tree::Nary(subs.iter().map(Arc::as_ref).collect()), + Or(ref v) => Tree::Nary(v.iter().map(|(_, p)| p.as_ref()).collect()), + Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::as_ref).collect()), + } + } +} + +impl TreeLike for Arc> { + fn as_node(&self) -> Tree { + use Policy::*; + + match self.as_ref() { + Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_) + | Ripemd160(_) | Hash160(_) => Tree::Nullary, + And(ref subs) => Tree::Nary(subs.iter().map(Arc::clone).collect()), + Or(ref v) => Tree::Nary(v.iter().map(|(_, p)| Arc::clone(p)).collect()), + Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::clone).collect()), + } + } +} + #[cfg(all(test, feature = "compiler"))] mod compiler_tests { use core::str::FromStr; From e813ad0799aeb62526953fb6b22e2c015409b4cb Mon Sep 17 00:00:00 2001 From: "Tobin C. Harding" Date: Tue, 10 Oct 2023 06:27:22 +1100 Subject: [PATCH 07/16] Add Arc to the semantic::Policy::Thresh vector In preparation for removing recursive algorithms in the `semantic` module add an `Arc` wrapper around the policies in the `Thresh` vector. Note we use the more explicit `Arc::new` instead of `into` because a lot of these should be removed when we remove recursion, later it may be necessary to check that we are not creating new references when a clone will suffice, the explicit `Arc::new` is easier to check. In tests however just use `into`. --- src/descriptor/sortedmulti.rs | 3 +- src/descriptor/tr.rs | 17 +++-- src/policy/mod.rs | 63 ++++++++++++------- src/policy/semantic.rs | 113 ++++++++++++++++++++++------------ 4 files changed, 126 insertions(+), 70 deletions(-) diff --git a/src/descriptor/sortedmulti.rs b/src/descriptor/sortedmulti.rs index 3bd5fa8e0..9c68db830 100644 --- a/src/descriptor/sortedmulti.rs +++ b/src/descriptor/sortedmulti.rs @@ -17,6 +17,7 @@ use crate::miniscript::limits::MAX_PUBKEYS_PER_MULTISIG; use crate::miniscript::satisfy::{Placeholder, Satisfaction}; use crate::plan::AssetProvider; use crate::prelude::*; +use crate::sync::Arc; use crate::{ errstr, expression, policy, script_num_size, Error, ForEachKey, Miniscript, MiniscriptKey, Satisfier, ToPublicKey, TranslateErr, Translator, @@ -201,7 +202,7 @@ impl policy::Liftable for SortedMulti self.k, self.pks .iter() - .map(|k| policy::semantic::Policy::Key(k.clone())) + .map(|k| Arc::new(policy::semantic::Policy::Key(k.clone()))) .collect(), ); Ok(ret) diff --git a/src/descriptor/tr.rs b/src/descriptor/tr.rs index e4815d657..2ef33d9a9 100644 --- a/src/descriptor/tr.rs +++ b/src/descriptor/tr.rs @@ -614,9 +614,10 @@ impl Liftable for TapTree { fn lift(&self) -> Result, Error> { fn lift_helper(s: &TapTree) -> Result, Error> { match *s { - TapTree::Tree { ref left, ref right, height: _ } => { - Ok(Policy::Threshold(1, vec![lift_helper(left)?, lift_helper(right)?])) - } + TapTree::Tree { ref left, ref right, height: _ } => Ok(Policy::Threshold( + 1, + vec![Arc::new(lift_helper(left)?), Arc::new(lift_helper(right)?)], + )), TapTree::Leaf(ref leaf) => leaf.lift(), } } @@ -629,9 +630,13 @@ impl Liftable for TapTree { impl Liftable for Tr { fn lift(&self) -> Result, Error> { match &self.tree { - Some(root) => { - Ok(Policy::Threshold(1, vec![Policy::Key(self.internal_key.clone()), root.lift()?])) - } + Some(root) => Ok(Policy::Threshold( + 1, + vec![ + Arc::new(Policy::Key(self.internal_key.clone())), + Arc::new(root.lift()?), + ], + )), None => Ok(Policy::Key(self.internal_key.clone())), } } diff --git a/src/policy/mod.rs 
b/src/policy/mod.rs index edb68ec9a..4798e3564 100644 --- a/src/policy/mod.rs +++ b/src/policy/mod.rs @@ -23,7 +23,7 @@ pub use self::semantic::Policy as Semantic; use crate::descriptor::Descriptor; use crate::miniscript::{Miniscript, ScriptContext}; use crate::sync::Arc; -use crate::{Error, MiniscriptKey, Terminal}; +use crate::{Error, MiniscriptKey, Terminal, Vec}; /// Policy entailment algorithm maximum number of terminals allowed. const ENTAILMENT_MAX_TERMINALS: usize = 20; @@ -136,28 +136,40 @@ impl Liftable for Terminal { | Terminal::NonZero(ref sub) | Terminal::ZeroNotEqual(ref sub) => sub.node.lift()?, Terminal::AndV(ref left, ref right) | Terminal::AndB(ref left, ref right) => { - Semantic::Threshold(2, vec![left.node.lift()?, right.node.lift()?]) + Semantic::Threshold( + 2, + vec![Arc::new(left.node.lift()?), Arc::new(right.node.lift()?)], + ) } Terminal::AndOr(ref a, ref b, ref c) => Semantic::Threshold( 1, vec![ - Semantic::Threshold(2, vec![a.node.lift()?, b.node.lift()?]), - c.node.lift()?, + Arc::new(Semantic::Threshold( + 2, + vec![Arc::new(a.node.lift()?), Arc::new(b.node.lift()?)], + )), + Arc::new(c.node.lift()?), ], ), Terminal::OrB(ref left, ref right) | Terminal::OrD(ref left, ref right) | Terminal::OrC(ref left, ref right) - | Terminal::OrI(ref left, ref right) => { - Semantic::Threshold(1, vec![left.node.lift()?, right.node.lift()?]) - } + | Terminal::OrI(ref left, ref right) => Semantic::Threshold( + 1, + vec![Arc::new(left.node.lift()?), Arc::new(right.node.lift()?)], + ), Terminal::Thresh(k, ref subs) => { - let semantic_subs: Result<_, Error> = subs.iter().map(|s| s.node.lift()).collect(); - Semantic::Threshold(k, semantic_subs?) - } - Terminal::Multi(k, ref keys) | Terminal::MultiA(k, ref keys) => { - Semantic::Threshold(k, keys.iter().map(|k| Semantic::Key(k.clone())).collect()) + let semantic_subs: Result>, Error> = + subs.iter().map(|s| s.node.lift()).collect(); + let semantic_subs = semantic_subs?.into_iter().map(Arc::new).collect(); + Semantic::Threshold(k, semantic_subs) } + Terminal::Multi(k, ref keys) | Terminal::MultiA(k, ref keys) => Semantic::Threshold( + k, + keys.iter() + .map(|k| Arc::new(Semantic::Key(k.clone()))) + .collect(), + ), } .normalized(); Ok(ret) @@ -197,17 +209,22 @@ impl Liftable for Concrete { Concrete::Ripemd160(ref h) => Semantic::Ripemd160(h.clone()), Concrete::Hash160(ref h) => Semantic::Hash160(h.clone()), Concrete::And(ref subs) => { - let semantic_subs: Result<_, Error> = subs.iter().map(Liftable::lift).collect(); - Semantic::Threshold(2, semantic_subs?) + let semantic_subs: Result>, Error> = + subs.iter().map(Liftable::lift).collect(); + let semantic_subs = semantic_subs?.into_iter().map(Arc::new).collect(); + Semantic::Threshold(2, semantic_subs) } Concrete::Or(ref subs) => { - let semantic_subs: Result<_, Error> = + let semantic_subs: Result>, Error> = subs.iter().map(|(_p, sub)| sub.lift()).collect(); - Semantic::Threshold(1, semantic_subs?) + let semantic_subs = semantic_subs?.into_iter().map(Arc::new).collect(); + Semantic::Threshold(1, semantic_subs) } Concrete::Threshold(k, ref subs) => { - let semantic_subs: Result<_, Error> = subs.iter().map(Liftable::lift).collect(); - Semantic::Threshold(k, semantic_subs?) 
+ let semantic_subs: Result>, Error> = + subs.iter().map(Liftable::lift).collect(); + let semantic_subs = semantic_subs?.into_iter().map(Arc::new).collect(); + Semantic::Threshold(k, semantic_subs) } } .normalized(); @@ -346,14 +363,14 @@ mod tests { Semantic::Threshold( 1, vec![ - Semantic::Threshold( + Arc::new(Semantic::Threshold( 2, vec![ - Semantic::Key(key_a), - Semantic::Older(Sequence::from_height(42)) + Arc::new(Semantic::Key(key_a)), + Arc::new(Semantic::Older(Sequence::from_height(42))) ] - ), - Semantic::Key(key_b) + )), + Arc::new(Semantic::Key(key_b)) ] ), ms_str.lift().unwrap() diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs index 0b4a87f2b..20a34f4fb 100644 --- a/src/policy/semantic.rs +++ b/src/policy/semantic.rs @@ -13,6 +13,7 @@ use bitcoin::{absolute, Sequence}; use super::concrete::PolicyError; use super::ENTAILMENT_MAX_TERMINALS; use crate::prelude::*; +use crate::sync::Arc; use crate::{errstr, expression, AbsLockTime, Error, ForEachKey, MiniscriptKey, Translator}; /// Abstract policy which corresponds to the semantics of a miniscript and @@ -42,7 +43,7 @@ pub enum Policy { /// A HASH160 whose preimage must be provided to satisfy the descriptor. Hash160(Pk::Hash160), /// A set of descriptors, satisfactions must be provided for `k` of them. - Threshold(usize, Vec>), + Threshold(usize, Vec>>), } impl Policy @@ -144,7 +145,8 @@ impl Policy { Policy::Threshold(k, ref subs) => { let new_subs: Result>, _> = subs.iter().map(|sub| sub.translate_pk(t)).collect(); - new_subs.map(|ok| Policy::Threshold(k, ok)) + let new_subs = new_subs?.into_iter().map(Arc::new).collect(); + Ok(Policy::Threshold(k, new_subs)) } } } @@ -217,8 +219,9 @@ impl Policy { Policy::Threshold(k, subs) => { let mut ret_subs = vec![]; for sub in subs { - ret_subs.push(sub.satisfy_constraint(witness, available)); + ret_subs.push(sub.as_ref().clone().satisfy_constraint(witness, available)); } + let ret_subs = ret_subs.into_iter().map(Arc::new).collect(); Policy::Threshold(k, ret_subs) } ref leaf if leaf == witness => { @@ -344,7 +347,7 @@ impl_from_tree!( } let mut subs = Vec::with_capacity(nsubs); for arg in &top.args { - subs.push(Policy::from_tree(arg)?); + subs.push(Arc::new(Policy::from_tree(arg)?)); } Ok(Policy::Threshold(nsubs, subs)) } @@ -354,7 +357,7 @@ impl_from_tree!( } let mut subs = Vec::with_capacity(nsubs); for arg in &top.args { - subs.push(Policy::from_tree(arg)?); + subs.push(Arc::new(Policy::from_tree(arg)?)); } Ok(Policy::Threshold(1, subs)) } @@ -381,7 +384,7 @@ impl_from_tree!( let mut subs = Vec::with_capacity(top.args.len() - 1); for arg in &top.args[1..] 
{ - subs.push(Policy::from_tree(arg)?); + subs.push(Arc::new(Policy::from_tree(arg)?)); } Ok(Policy::Threshold(thresh as usize, subs)) } @@ -398,11 +401,17 @@ impl Policy { Policy::Threshold(k, subs) => { let mut ret_subs = Vec::with_capacity(subs.len()); - let subs: Vec<_> = subs.into_iter().map(|sub| sub.normalized()).collect(); - let trivial_count = subs.iter().filter(|&pol| *pol == Policy::Trivial).count(); + let subs: Vec<_> = subs + .into_iter() + .map(|sub| Arc::new(sub.as_ref().clone().normalized())) + .collect(); + let trivial_count = subs + .iter() + .filter(|&pol| *pol.as_ref() == Policy::Trivial) + .count(); let unsatisfied_count = subs .iter() - .filter(|&pol| *pol == Policy::Unsatisfiable) + .filter(|&pol| *pol.as_ref() == Policy::Unsatisfiable) .count(); let n = subs.len() - unsatisfied_count - trivial_count; // remove all true/false @@ -412,20 +421,20 @@ impl Policy { let is_or = m == 1; for sub in subs { - match sub { + match sub.as_ref() { Policy::Trivial | Policy::Unsatisfiable => {} - Policy::Threshold(k, subs) => { + Policy::Threshold(ref k, ref subs) => { match (is_and, is_or) { (true, true) => { // means m = n = 1, thresh(1,X) type thing. - ret_subs.push(Policy::Threshold(k, subs)); + ret_subs.push(Arc::new(Policy::Threshold(*k, subs.to_vec()))); } - (true, false) if k == subs.len() => ret_subs.extend(subs), // and case - (false, true) if k == 1 => ret_subs.extend(subs), // or case - _ => ret_subs.push(Policy::Threshold(k, subs)), + (true, false) if *k == subs.len() => ret_subs.extend(subs.to_vec()), // and case + (false, true) if *k == 1 => ret_subs.extend(subs.to_vec()), // or case + _ => ret_subs.push(Arc::new(Policy::Threshold(*k, subs.to_vec()))), } } - x => ret_subs.push(x), + x => ret_subs.push(Arc::new(x.clone())), } } // Now reason about m of n threshold @@ -434,7 +443,9 @@ impl Policy { } else if m > ret_subs.len() { Policy::Unsatisfiable } else if ret_subs.len() == 1 { - ret_subs.pop().unwrap() + let policy = ret_subs.pop().unwrap(); + // Only one strong reference because we created the Arc when pushing to ret_subs. + Arc::try_unwrap(policy).unwrap() } else if is_and { Policy::Threshold(ret_subs.len(), ret_subs) } else if is_or { @@ -531,9 +542,12 @@ impl Policy { Policy::Older(t) } } - Policy::Threshold(k, subs) => { - Policy::Threshold(k, subs.into_iter().map(|sub| sub.at_age(age)).collect()) - } + Policy::Threshold(k, subs) => Policy::Threshold( + k, + subs.into_iter() + .map(|sub| Arc::new(sub.as_ref().clone().at_age(age))) + .collect(), + ), x => x, }; self.normalized() @@ -558,9 +572,12 @@ impl Policy { Policy::After(t.into()) } } - Policy::Threshold(k, subs) => { - Policy::Threshold(k, subs.into_iter().map(|sub| sub.at_lock_time(n)).collect()) - } + Policy::Threshold(k, subs) => Policy::Threshold( + k, + subs.into_iter() + .map(|sub| Arc::new(sub.as_ref().clone().at_lock_time(n))) + .collect(), + ), x => x, }; self.normalized() @@ -601,7 +618,7 @@ impl Policy { | Policy::Hash160(..) 
=> Some(0), Policy::Threshold(k, ref subs) => { let mut sublens: Vec = - subs.iter().filter_map(Policy::minimum_n_keys).collect(); + subs.iter().filter_map(|p| p.minimum_n_keys()).collect(); if sublens.len() < k { // Not enough branches are satisfiable None @@ -623,7 +640,10 @@ impl Policy { pub fn sorted(self) -> Policy { match self { Policy::Threshold(k, subs) => { - let mut new_subs: Vec<_> = subs.into_iter().map(Policy::sorted).collect(); + let mut new_subs: Vec<_> = subs + .into_iter() + .map(|p| Arc::new(p.as_ref().clone().sorted())) + .collect(); new_subs.sort(); Policy::Threshold(k, new_subs) } @@ -694,8 +714,8 @@ mod tests { Policy::Threshold( 1, vec![ - Policy::Key("".to_owned()), - Policy::Older(Sequence::from_height(1000)), + Policy::Key("".to_owned()).into(), + Policy::Older(Sequence::from_height(1000)).into(), ] ) ); @@ -714,7 +734,13 @@ mod tests { let policy = StringPolicy::from_str("or(pk(),UNSATISFIABLE)").unwrap(); assert_eq!( policy, - Policy::Threshold(1, vec![Policy::Key("".to_owned()), Policy::Unsatisfiable,]) + Policy::Threshold( + 1, + vec![ + Policy::Key("".to_owned()).into(), + Policy::Unsatisfiable.into() + ] + ) ); assert_eq!(policy.relative_timelocks(), vec![]); assert_eq!(policy.absolute_timelocks(), vec![]); @@ -724,7 +750,13 @@ mod tests { let policy = StringPolicy::from_str("and(pk(),UNSATISFIABLE)").unwrap(); assert_eq!( policy, - Policy::Threshold(2, vec![Policy::Key("".to_owned()), Policy::Unsatisfiable,]) + Policy::Threshold( + 2, + vec![ + Policy::Key("".to_owned()).into(), + Policy::Unsatisfiable.into() + ] + ) ); assert_eq!(policy.relative_timelocks(), vec![]); assert_eq!(policy.absolute_timelocks(), vec![]); @@ -742,11 +774,11 @@ mod tests { Policy::Threshold( 2, vec![ - Policy::Older(Sequence::from_height(1000)), - Policy::Older(Sequence::from_height(10000)), - Policy::Older(Sequence::from_height(1000)), - Policy::Older(Sequence::from_height(2000)), - Policy::Older(Sequence::from_height(2000)), + Policy::Older(Sequence::from_height(1000)).into(), + Policy::Older(Sequence::from_height(10000)).into(), + Policy::Older(Sequence::from_height(1000)).into(), + Policy::Older(Sequence::from_height(2000)).into(), + Policy::Older(Sequence::from_height(2000)).into(), ] ) ); @@ -766,11 +798,11 @@ mod tests { Policy::Threshold( 2, vec![ - Policy::Older(Sequence::from_height(1000)), - Policy::Older(Sequence::from_height(10000)), - Policy::Older(Sequence::from_height(1000)), - Policy::Unsatisfiable, - Policy::Unsatisfiable, + Policy::Older(Sequence::from_height(1000)).into(), + Policy::Older(Sequence::from_height(10000)).into(), + Policy::Older(Sequence::from_height(1000)).into(), + Policy::Unsatisfiable.into(), + Policy::Unsatisfiable.into(), ] ) ); @@ -876,7 +908,8 @@ mod tests { "or(and(older(4096),thresh(2,pk(A),pk(B),pk(C))),thresh(11,pk(F1),pk(F2),pk(F3),pk(F4),pk(F5),pk(F6),pk(F7),pk(F8),pk(F9),pk(F10),pk(F11),pk(F12),pk(F13),pk(F14)))").unwrap(); // Very bad idea to add master key,pk but let's have it have 50M blocks let master_key = StringPolicy::from_str("and(older(50000000),pk(master))").unwrap(); - let new_liquid_pol = Policy::Threshold(1, vec![liquid_pol.clone(), master_key]); + let new_liquid_pol = + Policy::Threshold(1, vec![liquid_pol.clone().into(), master_key.into()]); assert!(liquid_pol.clone().entails(new_liquid_pol.clone()).unwrap()); assert!(!new_liquid_pol.entails(liquid_pol.clone()).unwrap()); From df3a85a76f5cbca2991111eafe7ca636502f16df Mon Sep 17 00:00:00 2001 From: "Tobin C. 
Harding" Date: Wed, 11 Oct 2023 10:50:41 +1100 Subject: [PATCH 08/16] Implement TreeLike for semantic::Policy In preparation for removing recursive algorithms in the `semantic` module implement the `TreeLike` trait to enable iteration over policy nodes. This is a direct copy of the `concrete::Policy` impl with the `And` and `Or` variants removed. --- src/policy/semantic.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs index 20a34f4fb..427e93d8a 100644 --- a/src/policy/semantic.rs +++ b/src/policy/semantic.rs @@ -12,6 +12,7 @@ use bitcoin::{absolute, Sequence}; use super::concrete::PolicyError; use super::ENTAILMENT_MAX_TERMINALS; +use crate::iter::{Tree, TreeLike}; use crate::prelude::*; use crate::sync::Arc; use crate::{errstr, expression, AbsLockTime, Error, ForEachKey, MiniscriptKey, Translator}; @@ -652,6 +653,30 @@ impl Policy { } } +impl<'a, Pk: MiniscriptKey> TreeLike for &'a Policy { + fn as_node(&self) -> Tree { + use Policy::*; + + match *self { + Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_) + | Ripemd160(_) | Hash160(_) => Tree::Nullary, + Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::as_ref).collect()), + } + } +} + +impl<'a, Pk: MiniscriptKey> TreeLike for Arc> { + fn as_node(&self) -> Tree { + use Policy::*; + + match self.as_ref() { + Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_) + | Ripemd160(_) | Hash160(_) => Tree::Nullary, + Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::clone).collect()), + } + } +} + #[cfg(test)] mod tests { use core::str::FromStr; From 97ba4e9a6500a75e96c56d3a0027ab118419e317 Mon Sep 17 00:00:00 2001 From: "Tobin C. Harding" Date: Wed, 11 Oct 2023 10:58:09 +1100 Subject: [PATCH 09/16] semantic: Remove recursion in for_each_key Done as part of the effort to remove all the recursion crate wide. Use the `TreeLike` trait to iterate over policy nodes and remove the recursive call in `semantic::Policy::for_each_key`. --- src/policy/semantic.rs | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs index 427e93d8a..e6a765b90 100644 --- a/src/policy/semantic.rs +++ b/src/policy/semantic.rs @@ -66,27 +66,14 @@ where impl ForEachKey for Policy { fn for_each_key<'a, F: FnMut(&'a Pk) -> bool>(&'a self, mut pred: F) -> bool { - self.real_for_each_key(&mut pred) + self.pre_order_iter().all(|policy| match policy { + Policy::Key(ref pk) => pred(pk), + _ => true, + }) } } impl Policy { - fn real_for_each_key<'a, F: FnMut(&'a Pk) -> bool>(&'a self, pred: &mut F) -> bool { - match *self { - Policy::Unsatisfiable | Policy::Trivial => true, - Policy::Key(ref pk) => pred(pk), - Policy::Sha256(..) - | Policy::Hash256(..) - | Policy::Ripemd160(..) - | Policy::Hash160(..) - | Policy::After(..) - | Policy::Older(..) => true, - Policy::Threshold(_, ref subs) => { - subs.iter().all(|sub| sub.real_for_each_key(&mut *pred)) - } - } - } - /// Converts a policy using one kind of public key to another type of public key. /// /// # Examples From 211abad49a94326a01ee605d3c9bbb929bc940ff Mon Sep 17 00:00:00 2001 From: "Tobin C. Harding" Date: Wed, 11 Oct 2023 11:00:51 +1100 Subject: [PATCH 10/16] semantic: Remove recursion in translate_pk Done as part of the effort to remove all the recursion crate wide. Use the `TreeLike` trait to iterate over policy nodes and remove the recursive call in `semantic::Policy::translate_pk`. 
---
 src/policy/semantic.rs | 39 +++++++++++++++++++++++----------------
 1 file changed, 23 insertions(+), 16 deletions(-)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index e6a765b90..4ba70e008 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -120,23 +120,30 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
         T: Translator<Pk, Q, E>,
         Q: MiniscriptKey,
     {
-        match *self {
-            Policy::Unsatisfiable => Ok(Policy::Unsatisfiable),
-            Policy::Trivial => Ok(Policy::Trivial),
-            Policy::Key(ref pk) => t.pk(pk).map(Policy::Key),
-            Policy::Sha256(ref h) => t.sha256(h).map(Policy::Sha256),
-            Policy::Hash256(ref h) => t.hash256(h).map(Policy::Hash256),
-            Policy::Ripemd160(ref h) => t.ripemd160(h).map(Policy::Ripemd160),
-            Policy::Hash160(ref h) => t.hash160(h).map(Policy::Hash160),
-            Policy::After(n) => Ok(Policy::After(n)),
-            Policy::Older(n) => Ok(Policy::Older(n)),
-            Policy::Threshold(k, ref subs) => {
-                let new_subs: Result<Vec<Policy<Q>>, _> =
-                    subs.iter().map(|sub| sub.translate_pk(t)).collect();
-                let new_subs = new_subs?.into_iter().map(Arc::new).collect();
-                Ok(Policy::Threshold(k, new_subs))
-            }
+        use Policy::*;
+
+        let mut translated = vec![];
+        for data in self.post_order_iter() {
+            let child_n = |n| Arc::clone(&translated[data.child_indices[n]]);
+
+            let new_policy = match data.node {
+                Unsatisfiable => Unsatisfiable,
+                Trivial => Trivial,
+                Key(ref pk) => t.pk(pk).map(Key)?,
+                Sha256(ref h) => t.sha256(h).map(Sha256)?,
+                Hash256(ref h) => t.hash256(h).map(Hash256)?,
+                Ripemd160(ref h) => t.ripemd160(h).map(Ripemd160)?,
+                Hash160(ref h) => t.hash160(h).map(Hash160)?,
+                Older(ref n) => Older(*n),
+                After(ref n) => After(*n),
+                Threshold(ref k, ref subs) => Threshold(*k, (0..subs.len()).map(child_n).collect()),
+            };
+            translated.push(Arc::new(new_policy));
         }
+        // Unwrap is ok because we know we processed at least one node.
+        let root_node = translated.pop().unwrap();
+        // Unwrap is ok because we know `root_node` is the only strong reference.
+        Ok(Arc::try_unwrap(root_node).unwrap())
     }
 
     /// Computes whether the current policy entails the second one.

From b1ae1ff0b31730a58be26792fcff0e7f0b1b86eb Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Wed, 11 Oct 2023 11:17:36 +1100
Subject: [PATCH 11/16] semantic: Remove recursion in n_terminals

Done as part of the effort to remove all the recursion crate-wide.

Use the `TreeLike` trait to iterate over policy nodes and remove the
recursive call in `semantic::Policy::n_terminals`.
---
 src/policy/semantic.rs | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index 4ba70e008..36a1a59c8 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -181,11 +181,21 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
 
     // Helper function to compute the number of constraints in policy.
     fn n_terminals(&self) -> usize {
-        match self {
-            &Policy::Threshold(_k, ref subs) => subs.iter().map(|sub| sub.n_terminals()).sum(),
-            &Policy::Trivial | &Policy::Unsatisfiable => 0,
-            _leaf => 1,
+        use Policy::*;
+
+        let mut n_terminals = vec![];
+        for data in self.post_order_iter() {
+            let n_terminals_for_child_n = |n| n_terminals[data.child_indices[n]];
+
+            let num = match data.node {
+                Threshold(_k, subs) => (0..subs.len()).map(n_terminals_for_child_n).sum(),
+                Trivial | Unsatisfiable => 0,
+                _leaf => 1,
+            };
+            n_terminals.push(num);
         }
+        // Ok to unwrap because we know we processed at least one node.
+        n_terminals.pop().unwrap()
     }
 
     // Helper function to get the first constraint in the policy.
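Patches 09 through 16 all apply the same bottom-up rewrite: walk the policy with `TreeLike::post_order_iter`, push one result per node, and let each `Threshold` node look up its children's results through `child_indices` instead of recursing. The standalone Rust sketch below shows that accumulator pattern on a toy tree; `Expr`, `count_leaves` and the two-pass traversal are illustrative stand-ins only and are not part of the miniscript API used in the diffs above.

    // Sketch only: a recursive fold replaced by an explicit post-order walk
    // plus a value stack. All names here are hypothetical.
    enum Expr {
        Leaf,
        Thresh(usize, Vec<Expr>),
    }

    // Counts leaves bottom-up without recursion, mirroring how `n_terminals`
    // above consumes `post_order_iter` and `child_indices`.
    fn count_leaves(root: &Expr) -> usize {
        // Pass 1: collect nodes in post order (children before parents).
        let mut order = Vec::new();
        let mut stack = vec![root];
        while let Some(node) = stack.pop() {
            order.push(node);
            if let Expr::Thresh(_, children) = node {
                stack.extend(children.iter());
            }
        }
        order.reverse();

        // Pass 2: each node pops its children's results and pushes its own,
        // so the last value left is the result for the root.
        let mut results: Vec<usize> = Vec::new();
        for node in order {
            let value = match node {
                Expr::Leaf => 1,
                Expr::Thresh(_, children) => {
                    let split = results.len() - children.len();
                    results.drain(split..).sum()
                }
            };
            results.push(value);
        }
        results.pop().expect("at least the root was processed")
    }

    fn main() {
        let tree = Expr::Thresh(2, vec![Expr::Leaf, Expr::Thresh(1, vec![Expr::Leaf, Expr::Leaf])]);
        assert_eq!(count_leaves(&tree), 3);
    }

The invariant is the same one the patches rely on: children are always visited before their parent, so every index a parent needs is already populated, and the final element is the answer for the root.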
From c3c5e726ce79ad797ce5e860b429d5063ba6180c Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Wed, 11 Oct 2023 13:45:50 +1100
Subject: [PATCH 12/16] semantic: Remove recursion in real_*_timelocks

Done as part of the effort to remove all the recursion crate-wide.

Use the `TreeLike` trait to iterate over policy nodes and remove the
recursive calls in the `semantic::Policy::real_*_timelocks` functions.
Done together because they are basically identical.
---
 src/policy/semantic.rs | 42 ++++++++++++------------------------------
 1 file changed, 12 insertions(+), 30 deletions(-)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index 36a1a59c8..30d48d375 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -479,21 +479,12 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
 
     /// Helper function to do the recursion in `timelocks`.
     fn real_relative_timelocks(&self) -> Vec<u32> {
-        match *self {
-            Policy::Unsatisfiable
-            | Policy::Trivial
-            | Policy::Key(..)
-            | Policy::Sha256(..)
-            | Policy::Hash256(..)
-            | Policy::Ripemd160(..)
-            | Policy::Hash160(..) => vec![],
-            Policy::After(..) => vec![],
-            Policy::Older(t) => vec![t.to_consensus_u32()],
-            Policy::Threshold(_, ref subs) => subs.iter().fold(vec![], |mut acc, x| {
-                acc.extend(x.real_relative_timelocks());
-                acc
-            }),
-        }
+        self.pre_order_iter()
+            .filter_map(|policy| match policy {
+                Policy::Older(t) => Some(t.to_consensus_u32()),
+                _ => None,
+            })
+            .collect()
     }
 
     /// Returns a list of all relative timelocks, not including 0, which appear
@@ -507,21 +498,12 @@
 
     /// Helper function for recursion in `absolute timelocks`
    fn real_absolute_timelocks(&self) -> Vec<u32> {
-        match *self {
-            Policy::Unsatisfiable
-            | Policy::Trivial
-            | Policy::Key(..)
-            | Policy::Sha256(..)
-            | Policy::Hash256(..)
-            | Policy::Ripemd160(..)
-            | Policy::Hash160(..) => vec![],
-            Policy::Older(..) => vec![],
-            Policy::After(t) => vec![t.to_u32()],
-            Policy::Threshold(_, ref subs) => subs.iter().fold(vec![], |mut acc, x| {
-                acc.extend(x.real_absolute_timelocks());
-                acc
-            }),
-        }
+        self.pre_order_iter()
+            .filter_map(|policy| match policy {
+                Policy::After(t) => Some(t.to_u32()),
+                _ => None,
+            })
+            .collect()
     }
 
     /// Returns a list of all absolute timelocks, not including 0, which appear

From d9f9d43e5139c8cab644df067a43a948021cbf61 Mon Sep 17 00:00:00 2001
From: "Tobin C. Harding"
Date: Wed, 11 Oct 2023 13:53:51 +1100
Subject: [PATCH 13/16] semantic: Remove recursion in at_age and at_lock_time

Done as part of the effort to remove all the recursion crate-wide.

Use the `TreeLike` trait to iterate over policy nodes and remove the
recursive calls in `semantic::Policy::at_age` and
`semantic::Policy::at_lock_time`. Done together because they are
basically identical.
---
 src/policy/semantic.rs | 105 +++++++++++++++++++++++++----------------
 1 file changed, 64 insertions(+), 41 deletions(-)

diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs
index 30d48d375..4eb4ddc07 100644
--- a/src/policy/semantic.rs
+++ b/src/policy/semantic.rs
@@ -517,57 +517,80 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
     /// Filters a policy by eliminating relative timelock constraints
     /// that are not satisfied at the given `age`.
- pub fn at_age(mut self, age: Sequence) -> Policy { - self = match self { - Policy::Older(t) => { - if t.is_height_locked() && age.is_time_locked() - || t.is_time_locked() && age.is_height_locked() - || t.to_consensus_u32() > age.to_consensus_u32() - { - Policy::Unsatisfiable - } else { - Policy::Older(t) + pub fn at_age(self, age: Sequence) -> Policy { + use Policy::*; + + let mut at_age = vec![]; + for data in Arc::new(self).post_order_iter() { + let child_n = |n| Arc::clone(&at_age[data.child_indices[n]]); + + let new_policy = match data.node.as_ref() { + Older(ref t) => { + if t.is_height_locked() && age.is_time_locked() + || t.is_time_locked() && age.is_height_locked() + || t.to_consensus_u32() > age.to_consensus_u32() + { + Some(Policy::Unsatisfiable) + } else { + Some(Policy::Older(*t)) + } + } + Threshold(k, ref subs) => { + Some(Threshold(*k, (0..subs.len()).map(child_n).collect())) } + _ => None, + }; + match new_policy { + Some(new_policy) => at_age.push(Arc::new(new_policy)), + None => at_age.push(Arc::clone(&data.node)), } - Policy::Threshold(k, subs) => Policy::Threshold( - k, - subs.into_iter() - .map(|sub| Arc::new(sub.as_ref().clone().at_age(age))) - .collect(), - ), - x => x, - }; - self.normalized() + } + // Unwrap is ok because we know we processed at least one node. + let root_node = at_age.pop().unwrap(); + // Unwrap is ok because we know `root_node` is the only strong reference. + let policy = Arc::try_unwrap(root_node).unwrap(); + policy.normalized() } /// Filters a policy by eliminating absolute timelock constraints /// that are not satisfied at the given `n` (`n OP_CHECKLOCKTIMEVERIFY`). - pub fn at_lock_time(mut self, n: absolute::LockTime) -> Policy { + pub fn at_lock_time(self, n: absolute::LockTime) -> Policy { use absolute::LockTime::*; + use Policy::*; - self = match self { - Policy::After(t) => { - let t = absolute::LockTime::from(t); - let is_satisfied_by = match (t, n) { - (Blocks(t), Blocks(n)) => t <= n, - (Seconds(t), Seconds(n)) => t <= n, - _ => false, - }; - if !is_satisfied_by { - Policy::Unsatisfiable - } else { - Policy::After(t.into()) + let mut at_age = vec![]; + for data in Arc::new(self).post_order_iter() { + let child_n = |n| Arc::clone(&at_age[data.child_indices[n]]); + + let new_policy = match data.node.as_ref() { + After(t) => { + let t = absolute::LockTime::from(*t); + let is_satisfied_by = match (t, n) { + (Blocks(t), Blocks(n)) => t <= n, + (Seconds(t), Seconds(n)) => t <= n, + _ => false, + }; + if !is_satisfied_by { + Some(Unsatisfiable) + } else { + Some(After(t.into())) + } + } + Threshold(k, ref subs) => { + Some(Threshold(*k, (0..subs.len()).map(child_n).collect())) } + _ => None, + }; + match new_policy { + Some(new_policy) => at_age.push(Arc::new(new_policy)), + None => at_age.push(Arc::clone(&data.node)), } - Policy::Threshold(k, subs) => Policy::Threshold( - k, - subs.into_iter() - .map(|sub| Arc::new(sub.as_ref().clone().at_lock_time(n))) - .collect(), - ), - x => x, - }; - self.normalized() + } + // Unwrap is ok because we know we processed at least one node. + let root_node = at_age.pop().unwrap(); + // Unwrap is ok because we know `root_node` is the only strong reference. + let policy = Arc::try_unwrap(root_node).unwrap(); + policy.normalized() } /// Counts the number of public keys and keyhashes referenced in a policy. From eb75602f8c2ac12fbb514c3ec297c80e04d0ba3b Mon Sep 17 00:00:00 2001 From: "Tobin C. 
Harding" Date: Wed, 11 Oct 2023 14:03:05 +1100 Subject: [PATCH 14/16] semantic: Remove recursion in n_keys Done as part of the effort to remove all the recursion crate wide. Use the `TreeLike` trait to iterate over policy nodes and remove the recursive call in `semantic::Policy::n_keys`. --- src/policy/semantic.rs | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs index 4eb4ddc07..02d0058a4 100644 --- a/src/policy/semantic.rs +++ b/src/policy/semantic.rs @@ -596,17 +596,12 @@ impl Policy { /// Counts the number of public keys and keyhashes referenced in a policy. /// Duplicate keys will be double-counted. pub fn n_keys(&self) -> usize { - match *self { - Policy::Unsatisfiable | Policy::Trivial => 0, - Policy::Key(..) => 1, - Policy::After(..) - | Policy::Older(..) - | Policy::Sha256(..) - | Policy::Hash256(..) - | Policy::Ripemd160(..) - | Policy::Hash160(..) => 0, - Policy::Threshold(_, ref subs) => subs.iter().map(|sub| sub.n_keys()).sum::(), - } + self.pre_order_iter() + .filter(|policy| match policy { + Policy::Key(..) => true, + _ => false, + }) + .count() } /// Counts the minimum number of public keys for which signatures could be From 09a47cb0de9f7be5d6dc85163cd59a1edf2d741b Mon Sep 17 00:00:00 2001 From: "Tobin C. Harding" Date: Wed, 11 Oct 2023 14:16:26 +1100 Subject: [PATCH 15/16] semantic: Remove recursion in minimum_n_keys Done as part of the effort to remove all the recursion crate wide. Use the `TreeLike` trait to iterate over policy nodes and remove the recursive call in `semantic::Policy::minimum_n_keys`. --- src/policy/semantic.rs | 46 ++++++++++++++++++++++++------------------ 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs index 02d0058a4..0e473d734 100644 --- a/src/policy/semantic.rs +++ b/src/policy/semantic.rs @@ -611,28 +611,34 @@ impl Policy { /// /// Returns `None` if the policy is not satisfiable. pub fn minimum_n_keys(&self) -> Option { - match *self { - Policy::Unsatisfiable => None, - Policy::Trivial => Some(0), - Policy::Key(..) => Some(1), - Policy::After(..) - | Policy::Older(..) - | Policy::Sha256(..) - | Policy::Hash256(..) - | Policy::Ripemd160(..) - | Policy::Hash160(..) => Some(0), - Policy::Threshold(k, ref subs) => { - let mut sublens: Vec = - subs.iter().filter_map(|p| p.minimum_n_keys()).collect(); - if sublens.len() < k { - // Not enough branches are satisfiable - None - } else { - sublens.sort_unstable(); - Some(sublens[0..k].iter().cloned().sum::()) + use Policy::*; + + let mut minimum_n_keys = vec![]; + for data in Arc::new(self).post_order_iter() { + let minimum_n_keys_for_child_n = |n| minimum_n_keys[data.child_indices[n]]; + + let minimum_n_key = match data.node { + Unsatisfiable => None, + Trivial | After(..) | Older(..) | Sha256(..) | Hash256(..) | Ripemd160(..) + | Hash160(..) => Some(0), + Key(..) => Some(1), + Threshold(k, ref subs) => { + let mut sublens = (0..subs.len()) + .filter_map(minimum_n_keys_for_child_n) + .collect::>(); + if sublens.len() < *k { + // Not enough branches are satisfiable + None + } else { + sublens.sort_unstable(); + Some(sublens[0..*k].iter().cloned().sum::()) + } } - } + }; + minimum_n_keys.push(minimum_n_key); } + // Ok to unwrap because we know we processed at least one node. + minimum_n_keys.pop().unwrap() } } From f9307c8ced48b55b899f1f006117aeded51772e2 Mon Sep 17 00:00:00 2001 From: "Tobin C. 
Harding" Date: Wed, 11 Oct 2023 14:22:11 +1100 Subject: [PATCH 16/16] semantic: Remove recursion in sorted Done as part of the effort to remove all the recursion crate wide. Use the `TreeLike` trait to iterate over policy nodes and remove the recursive call in `semantic::Policy::sorted`. --- src/policy/semantic.rs | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/src/policy/semantic.rs b/src/policy/semantic.rs index 0e473d734..0a2b015b7 100644 --- a/src/policy/semantic.rs +++ b/src/policy/semantic.rs @@ -649,17 +649,29 @@ impl Policy { /// in general this appears to require Gröbner basis techniques that are not /// implemented. pub fn sorted(self) -> Policy { - match self { - Policy::Threshold(k, subs) => { - let mut new_subs: Vec<_> = subs - .into_iter() - .map(|p| Arc::new(p.as_ref().clone().sorted())) - .collect(); - new_subs.sort(); - Policy::Threshold(k, new_subs) + use Policy::*; + + let mut sorted = vec![]; + for data in Arc::new(self).post_order_iter() { + let child_n = |n| Arc::clone(&sorted[data.child_indices[n]]); + + let new_policy = match data.node.as_ref() { + Threshold(k, ref subs) => { + let mut subs = (0..subs.len()).map(child_n).collect::>(); + subs.sort(); + Some(Threshold(*k, subs)) + } + _ => None, + }; + match new_policy { + Some(new_policy) => sorted.push(Arc::new(new_policy)), + None => sorted.push(Arc::clone(&data.node)), } - x => x, } + // Unwrap is ok because we know we processed at least one node. + let root_node = sorted.pop().unwrap(); + // Unwrap is ok because we know `root_node` is the only strong reference. + Arc::try_unwrap(root_node).unwrap() } }