Diffstat (limited to 'src/ir/analysis/mod.rs')
-rw-r--r--  src/ir/analysis/mod.rs  49
1 file changed, 32 insertions(+), 17 deletions(-)
diff --git a/src/ir/analysis/mod.rs b/src/ir/analysis/mod.rs
index ef42e58d..cf4e3b4d 100644
--- a/src/ir/analysis/mod.rs
+++ b/src/ir/analysis/mod.rs
@@ -43,8 +43,8 @@ pub use self::template_params::UsedTemplateParameters;
mod derive_debug;
pub use self::derive_debug::CannotDeriveDebug;
mod has_vtable;
-pub use self::has_vtable::HasVtableAnalysis;
pub use self::has_vtable::HasVtable;
+pub use self::has_vtable::HasVtableAnalysis;
mod derive_default;
pub use self::derive_default::CannotDeriveDefault;
mod derive_copy;
@@ -115,7 +115,8 @@ pub trait MonotoneFramework: Sized + fmt::Debug {
/// queue up in the worklist when `constrain(node)` reports updated
/// information.
fn each_depending_on<F>(&self, node: Self::Node, f: F)
- where F: FnMut(Self::Node);
+ where
+ F: FnMut(Self::Node);
}
/// Whether an analysis's `constrain` function modified the incremental results
@@ -131,16 +132,18 @@ pub enum ConstrainResult {
/// Run an analysis in the monotone framework.
pub fn analyze<Analysis>(extra: Analysis::Extra) -> Analysis::Output
- where Analysis: MonotoneFramework,
+where
+ Analysis: MonotoneFramework,
{
let mut analysis = Analysis::new(extra);
let mut worklist = analysis.initial_worklist();
while let Some(node) = worklist.pop() {
if let ConstrainResult::Changed = analysis.constrain(node) {
- analysis.each_depending_on(node, |needs_work| {
- worklist.push(needs_work);
- });
+ analysis.each_depending_on(
+ node,
+ |needs_work| { worklist.push(needs_work); },
+ );
}
}
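Note: the hunks above only re-wrap the trait signature and the worklist loop; the fixpoint logic itself is unchanged. For orientation, here is a hedged, self-contained sketch of how an analysis plugs into this driver and into the `each_depending_on` contract documented above. The trait, enum, and driver are re-declared locally in simplified form (the real module converts the analysis into its output via `From`), and every name in the toy reachability analysis (`CanReachZero`, `reaches`, `finish`, the graph layout) is illustrative, not taken from bindgen.

use std::collections::{HashMap, HashSet};

// Simplified, local stand-ins for the framework in this module; illustrative only.
enum ConstrainResult {
    Changed,
    Same,
}

trait MonotoneFramework: Sized {
    type Node: Copy;
    type Extra;
    type Output;

    fn new(extra: Self::Extra) -> Self;
    fn initial_worklist(&self) -> Vec<Self::Node>;
    fn constrain(&mut self, node: Self::Node) -> ConstrainResult;
    fn each_depending_on<F>(&self, node: Self::Node, f: F)
    where
        F: FnMut(Self::Node);
    // The real module converts the finished analysis into `Output` with
    // `From`; a plain method keeps the sketch short.
    fn finish(self) -> Self::Output;
}

fn analyze<A>(extra: A::Extra) -> A::Output
where
    A: MonotoneFramework,
{
    let mut analysis = A::new(extra);
    let mut worklist = analysis.initial_worklist();
    while let Some(node) = worklist.pop() {
        if let ConstrainResult::Changed = analysis.constrain(node) {
            // Anything whose result may depend on `node` gets another look.
            analysis.each_depending_on(node, |needs_work| worklist.push(needs_work));
        }
    }
    analysis.finish()
}

// Toy analysis: which nodes of a directed graph can reach node 0?
struct CanReachZero<'g> {
    graph: &'g HashMap<usize, Vec<usize>>, // node -> successors
    reaches: HashSet<usize>,
}

impl<'g> MonotoneFramework for CanReachZero<'g> {
    type Node = usize;
    type Extra = &'g HashMap<usize, Vec<usize>>;
    type Output = HashSet<usize>;

    fn new(graph: Self::Extra) -> Self {
        CanReachZero { graph, reaches: HashSet::new() }
    }

    fn initial_worklist(&self) -> Vec<usize> {
        self.graph.keys().cloned().collect()
    }

    fn constrain(&mut self, node: usize) -> ConstrainResult {
        let reaches_zero = node == 0 ||
            self.graph[&node].iter().any(|succ| self.reaches.contains(succ));
        if reaches_zero && self.reaches.insert(node) {
            ConstrainResult::Changed
        } else {
            ConstrainResult::Same
        }
    }

    fn each_depending_on<F>(&self, node: usize, mut f: F)
    where
        F: FnMut(usize),
    {
        // Reverse edges: the predecessors of `node` depend on its result.
        for (&pred, succs) in self.graph {
            if succs.contains(&node) {
                f(pred);
            }
        }
    }

    fn finish(self) -> HashSet<usize> {
        self.reaches
    }
}

fn main() {
    // 2 -> 1 -> 0, while 3 points nowhere, so {0, 1, 2} can reach 0.
    let graph: HashMap<usize, Vec<usize>> =
        vec![(0, vec![]), (1, vec![0]), (2, vec![1]), (3, vec![])].into_iter().collect();
    let reachable = analyze::<CanReachZero>(&graph);
    let expected: HashSet<usize> = vec![0, 1, 2].into_iter().collect();
    assert_eq!(reachable, expected);
}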
@@ -148,8 +151,13 @@ pub fn analyze<Analysis>(extra: Analysis::Extra) -> Analysis::Output
}
/// Generate the dependency map for analysis
-pub fn generate_dependencies<F>(ctx: &BindgenContext, consider_edge: F) -> HashMap<ItemId, Vec<ItemId>>
- where F: Fn(EdgeKind) -> bool {
+pub fn generate_dependencies<F>(
+ ctx: &BindgenContext,
+ consider_edge: F,
+) -> HashMap<ItemId, Vec<ItemId>>
+where
+ F: Fn(EdgeKind) -> bool,
+{
let mut dependencies = HashMap::new();
for &item in ctx.whitelisted_items() {
@@ -158,14 +166,19 @@ pub fn generate_dependencies<F>(ctx: &BindgenContext, consider_edge: F) -> HashM
{
// We reverse our natural IR graph edges to find dependencies
// between nodes.
- item.trace(ctx, &mut |sub_item: ItemId, edge_kind| {
- if ctx.whitelisted_items().contains(&sub_item) &&
- consider_edge(edge_kind) {
- dependencies.entry(sub_item)
- .or_insert(vec![])
- .push(item);
+ item.trace(
+ ctx,
+ &mut |sub_item: ItemId, edge_kind| {
+ if ctx.whitelisted_items().contains(&sub_item) &&
+ consider_edge(edge_kind)
+ {
+ dependencies.entry(sub_item).or_insert(vec![]).push(
+ item,
+ );
}
- }, &());
+ },
+ &(),
+ );
}
}
dependencies
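For context on what the reformatted `generate_dependencies` computes: it traces each whitelisted item's outgoing IR edges, keeps only edges whose kind passes `consider_edge` and whose target is itself whitelisted, and records them in reverse, so that looking up an item yields the items that refer to it. That reversed map is what `each_depending_on` implementations consult. Below is a minimal, self-contained sketch of the same reversal over a toy edge list; the names (`reverse_edges`, the string-typed edge kinds) are illustrative, not bindgen's.

use std::collections::HashMap;

type Id = usize;

// Same shape as `generate_dependencies` above, minus `BindgenContext`:
// visit each node's outgoing, filtered edges and record them reversed, so
// that `deps[&n]` lists everything that refers to `n`.
fn reverse_edges<F>(edges: &[(Id, Id, &str)], consider_edge: F) -> HashMap<Id, Vec<Id>>
where
    F: Fn(&str) -> bool,
{
    let mut dependencies = HashMap::new();
    for &(from, to, kind) in edges {
        if consider_edge(kind) {
            dependencies.entry(to).or_insert(vec![]).push(from);
        }
    }
    dependencies
}

fn main() {
    // 1 refers to 2 through a "field" edge and to 3 through a "method" edge;
    // pretend this analysis only cares about field edges.
    let edges = [(1, 2, "field"), (1, 3, "method")];
    let deps = reverse_edges(&edges, |kind| kind == "field");
    assert_eq!(deps.get(&2), Some(&vec![1]));
    assert_eq!(deps.get(&3), None);
}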
@@ -313,7 +326,8 @@ mod tests {
}
fn each_depending_on<F>(&self, node: Node, mut f: F)
- where F: FnMut(Node),
+ where
+ F: FnMut(Node),
{
for dep in self.reversed.0[&node].iter() {
f(*dep);
@@ -334,7 +348,8 @@ mod tests {
println!("reachable = {:#?}", reachable);
fn nodes<A>(nodes: A) -> HashSet<Node>
- where A: AsRef<[usize]>,
+ where
+ A: AsRef<[usize]>,
{
nodes.as_ref().iter().cloned().map(Node).collect()
}
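The last hunk only re-wraps the where clause of the test-local `nodes` helper, which turns a plain slice of indices into a `HashSet<Node>` so expected sets stay readable. A hypothetical fragment in that style follows; the indexing shape of `reachable` and the concrete sets are assumptions for illustration, not taken from this test.

// Hypothetical test-body fragment, for illustration only. Assuming
// `reachable` maps each Node to the HashSet<Node> of nodes reachable from
// it (its exact shape is not shown in this diff), the helper keeps the
// assertion terse instead of repeating `Node(..)` for every element:
assert_eq!(reachable[&Node(0)], nodes([1, 2, 3]));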