-rw-r--r-- | Cargo.lock                                         | 16
-rw-r--r-- | Cargo.toml                                         |  1
-rw-r--r-- | src/codegen/mod.rs                                 |  7
-rw-r--r-- | src/ir/analysis/derive_copy.rs                     |  5
-rw-r--r-- | src/ir/analysis/derive_debug.rs                    |  5
-rw-r--r-- | src/ir/analysis/derive_default.rs                  |  7
-rw-r--r-- | src/ir/analysis/derive_hash.rs                     |  5
-rw-r--r-- | src/ir/analysis/derive_partialeq_or_partialord.rs  |  5
-rw-r--r-- | src/ir/analysis/has_destructor.rs                  |  5
-rw-r--r-- | src/ir/analysis/has_float.rs                       |  5
-rw-r--r-- | src/ir/analysis/has_type_param_in_array.rs         |  5
-rw-r--r-- | src/ir/analysis/has_vtable.rs                      |  5
-rw-r--r-- | src/ir/analysis/mod.rs                             | 12
-rw-r--r-- | src/ir/analysis/sizedness.rs                       |  8
-rw-r--r-- | src/ir/analysis/template_params.rs                 |  6
-rw-r--r-- | src/ir/comp.rs                                     |  2
-rw-r--r-- | src/ir/context.rs                                  | 17
-rw-r--r-- | src/lib.rs                                         |  7
18 files changed, 69 insertions, 54 deletions
diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -56,6 +56,7 @@ dependencies = [
  "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -72,6 +73,11 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
+name = "byteorder"
+version = "1.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "cc"
 version = "1.0.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -139,6 +145,14 @@ dependencies = [
 ]

 [[package]]
+name = "fxhash"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "glob"
 version = "0.2.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -389,6 +403,7 @@ dependencies = [
 "checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a"
 "checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0"
 "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
+"checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d"
 "checksum cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16"
 "checksum cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8fc0086be9ca82f7fc89fc873435531cb898b86e850005850de1f820e2db6e9b"
 "checksum cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4e7bb64a8ebb0d856483e1e682ea3422f883c5f5615a90d51a2c82fe87fdd3"
@@ -397,6 +412,7 @@ dependencies = [
 "checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
 "checksum env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afb070faf94c85d17d50ca44f6ad076bce18ae92f0037d350947240a36e9d42e"
 "checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7"
+"checksum fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
 "checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
 "checksum lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7"
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -54,6 +54,7 @@ peeking_take_while = "0.1.2"
 quote = { version = "0.6", default-features = false }
 regex = "1.0"
 which = "2.0"
+fxhash = "0.2"
 # New validation in 0.3.6 breaks bindgen-integration:
 # https://github.com/alexcrichton/proc-macro2/commit/489c642.
 proc-macro2 = { version = "0.4", default-features = false }
diff --git a/src/codegen/mod.rs b/src/codegen/mod.rs
index 9a7bf897..edaf7afe 100644
--- a/src/codegen/mod.rs
+++ b/src/codegen/mod.rs
@@ -43,12 +43,13 @@ use proc_macro2::{self, Ident, Span};
 use std;
 use std::borrow::Cow;
 use std::cell::Cell;
-use std::collections::{HashSet, VecDeque};
-use std::collections::hash_map::{Entry, HashMap};
+use std::collections::VecDeque;
+use std::collections::hash_map::Entry;
 use std::fmt::Write;
 use std::iter;
 use std::ops;
 use std::str::FromStr;
+use {HashMap, HashSet};

 // Name of type defined in constified enum module
 pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type";
@@ -2638,7 +2639,7 @@ impl CodeGenerator for Enum {
         );

         // A map where we keep a value -> variant relation.
-        let mut seen_values = HashMap::<_, Ident>::new();
+        let mut seen_values = HashMap::<_, Ident>::default();
         let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &());
         let is_toplevel = item.is_toplevel(ctx);
diff --git a/src/ir/analysis/derive_copy.rs b/src/ir/analysis/derive_copy.rs
index 9d0bcd19..55d30097 100644
--- a/src/ir/analysis/derive_copy.rs
+++ b/src/ir/analysis/derive_copy.rs
@@ -11,8 +11,7 @@ use ir::template::TemplateParameters;
 use ir::traversal::EdgeKind;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use std::collections::HashMap;
-use std::collections::HashSet;
+use {HashMap, HashSet};

 /// An analysis that finds for each IR item whether copy cannot be derived.
 ///
@@ -103,7 +102,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveCopy<'ctx> {
     type Output = HashSet<ItemId>;

     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveCopy<'ctx> {
-        let cannot_derive_copy = HashSet::new();
+        let cannot_derive_copy = HashSet::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         CannotDeriveCopy {
diff --git a/src/ir/analysis/derive_debug.rs b/src/ir/analysis/derive_debug.rs
index 9210148a..6580a68b 100644
--- a/src/ir/analysis/derive_debug.rs
+++ b/src/ir/analysis/derive_debug.rs
@@ -10,8 +10,7 @@ use ir::item::IsOpaque;
 use ir::traversal::EdgeKind;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use std::collections::HashMap;
-use std::collections::HashSet;
+use {HashMap, HashSet};

 /// An analysis that finds for each IR item whether debug cannot be derived.
 ///
@@ -104,7 +103,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveDebug<'ctx> {
     type Output = HashSet<ItemId>;

     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDebug<'ctx> {
-        let cannot_derive_debug = HashSet::new();
+        let cannot_derive_debug = HashSet::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         CannotDeriveDebug {
diff --git a/src/ir/analysis/derive_default.rs b/src/ir/analysis/derive_default.rs
index 2ff07ce9..904cabaa 100644
--- a/src/ir/analysis/derive_default.rs
+++ b/src/ir/analysis/derive_default.rs
@@ -12,8 +12,7 @@ use ir::traversal::EdgeKind;
 use ir::traversal::Trace;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use std::collections::HashMap;
-use std::collections::HashSet;
+use {HashMap, HashSet};

 /// An analysis that finds for each IR item whether default cannot be derived.
 ///
@@ -99,8 +98,8 @@ impl<'ctx> MonotoneFramework for CannotDeriveDefault<'ctx> {
     type Output = HashSet<ItemId>;

     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDefault<'ctx> {
-        let mut dependencies = HashMap::new();
-        let cannot_derive_default = HashSet::new();
+        let mut dependencies = HashMap::default();
+        let cannot_derive_default = HashSet::default();

         let whitelisted_items: HashSet<_> =
             ctx.whitelisted_items().iter().cloned().collect();
diff --git a/src/ir/analysis/derive_hash.rs b/src/ir/analysis/derive_hash.rs
index eee6d6f0..6c8b3976 100644
--- a/src/ir/analysis/derive_hash.rs
+++ b/src/ir/analysis/derive_hash.rs
@@ -10,8 +10,7 @@ use ir::item::IsOpaque;
 use ir::traversal::EdgeKind;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use std::collections::HashMap;
-use std::collections::HashSet;
+use {HashMap, HashSet};

 /// An analysis that finds for each IR item whether hash cannot be derived.
 ///
@@ -96,7 +95,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveHash<'ctx> {
     type Output = HashSet<ItemId>;

     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveHash<'ctx> {
-        let cannot_derive_hash = HashSet::new();
+        let cannot_derive_hash = HashSet::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         CannotDeriveHash {
diff --git a/src/ir/analysis/derive_partialeq_or_partialord.rs b/src/ir/analysis/derive_partialeq_or_partialord.rs
index 5a9a21c5..a64fdf38 100644
--- a/src/ir/analysis/derive_partialeq_or_partialord.rs
+++ b/src/ir/analysis/derive_partialeq_or_partialord.rs
@@ -9,8 +9,7 @@ use ir::item::{Item, IsOpaque};
 use ir::traversal::{EdgeKind, Trace};
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::{TypeKind, Type};
-use std::collections::HashMap;
-use std::collections::hash_map::Entry;
+use {HashMap, Entry};

 /// An analysis that finds for each IR item whether `PartialEq`/`PartialOrd`
 /// cannot be derived.
@@ -326,7 +325,7 @@ impl<'ctx> MonotoneFramework for CannotDerivePartialEqOrPartialOrd<'ctx> {
     fn new(
         ctx: &'ctx BindgenContext,
     ) -> CannotDerivePartialEqOrPartialOrd<'ctx> {
-        let can_derive_partialeq_or_partialord = HashMap::new();
+        let can_derive_partialeq_or_partialord = HashMap::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         CannotDerivePartialEqOrPartialOrd {
diff --git a/src/ir/analysis/has_destructor.rs b/src/ir/analysis/has_destructor.rs
index c87b7e25..c79b3645 100644
--- a/src/ir/analysis/has_destructor.rs
+++ b/src/ir/analysis/has_destructor.rs
@@ -5,8 +5,7 @@ use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::comp::{CompKind, Field, FieldMethods};
 use ir::ty::TypeKind;
-use std::collections::HashMap;
-use std::collections::HashSet;
+use {HashMap, HashSet};

 /// An analysis that finds for each IR item whether it has a destructor or not
 ///
@@ -73,7 +72,7 @@ impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> {
     type Output = HashSet<ItemId>;

     fn new(ctx: &'ctx BindgenContext) -> Self {
-        let have_destructor = HashSet::new();
+        let have_destructor = HashSet::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         HasDestructorAnalysis {
diff --git a/src/ir/analysis/has_float.rs b/src/ir/analysis/has_float.rs
index 92bbe36c..69cfcc85 100644
--- a/src/ir/analysis/has_float.rs
+++ b/src/ir/analysis/has_float.rs
@@ -1,8 +1,7 @@
 //! Determining which types has float.

 use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
-use std::collections::HashSet;
-use std::collections::HashMap;
+use {HashSet, HashMap};
 use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
@@ -84,7 +83,7 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
     type Output = HashSet<ItemId>;

     fn new(ctx: &'ctx BindgenContext) -> HasFloat<'ctx> {
-        let has_float = HashSet::new();
+        let has_float = HashSet::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         HasFloat {
diff --git a/src/ir/analysis/has_type_param_in_array.rs b/src/ir/analysis/has_type_param_in_array.rs
index 98288d3c..98959f0f 100644
--- a/src/ir/analysis/has_type_param_in_array.rs
+++ b/src/ir/analysis/has_type_param_in_array.rs
@@ -6,8 +6,7 @@ use ir::comp::FieldMethods;
 use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
-use std::collections::HashMap;
-use std::collections::HashSet;
+use {HashMap, HashSet};

 /// An analysis that finds for each IR item whether it has array or not.
 ///
@@ -92,7 +91,7 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
     fn new(
         ctx: &'ctx BindgenContext,
     ) -> HasTypeParameterInArray<'ctx> {
-        let has_type_parameter_in_array = HashSet::new();
+        let has_type_parameter_in_array = HashSet::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         HasTypeParameterInArray {
diff --git a/src/ir/analysis/has_vtable.rs b/src/ir/analysis/has_vtable.rs
index f3f2a695..410fca05 100644
--- a/src/ir/analysis/has_vtable.rs
+++ b/src/ir/analysis/has_vtable.rs
@@ -5,9 +5,8 @@ use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
 use std::cmp;
-use std::collections::HashMap;
-use std::collections::hash_map::Entry;
 use std::ops;
+use {HashMap, Entry};

 /// The result of the `HasVtableAnalysis` for an individual item.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Ord)]
@@ -148,7 +147,7 @@ impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> {
     type Output = HashMap<ItemId, HasVtableResult>;

     fn new(ctx: &'ctx BindgenContext) -> HasVtableAnalysis<'ctx> {
-        let have_vtable = HashMap::new();
+        let have_vtable = HashMap::default();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);

         HasVtableAnalysis {
diff --git a/src/ir/analysis/mod.rs b/src/ir/analysis/mod.rs
index 64958c07..7d6241ba 100644
--- a/src/ir/analysis/mod.rs
+++ b/src/ir/analysis/mod.rs
@@ -64,7 +64,7 @@ pub use self::sizedness::{Sizedness, SizednessAnalysis, SizednessResult};

 use ir::context::{BindgenContext, ItemId};
 use ir::traversal::{EdgeKind, Trace};
-use std::collections::HashMap;
+use HashMap;
 use std::fmt;
 use std::ops;

@@ -190,7 +190,7 @@ pub fn generate_dependencies<F>(
 where
     F: Fn(EdgeKind) -> bool,
 {
-    let mut dependencies = HashMap::new();
+    let mut dependencies = HashMap::default();

     for &item in ctx.whitelisted_items() {
         dependencies.entry(item).or_insert(vec![]);
@@ -219,7 +219,7 @@ where
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::collections::{HashMap, HashSet};
+    use {HashMap, HashSet};

     // Here we find the set of nodes that are reachable from any given
     // node. This is a lattice mapping nodes to subsets of all nodes. Our join
@@ -334,14 +334,14 @@ mod tests {
         // implementation. Don't copy this code outside of this test!

         let original_size =
-            self.reachable.entry(node).or_insert(HashSet::new()).len();
+            self.reachable.entry(node).or_insert(HashSet::default()).len();

         for sub_node in self.graph.0[&node].iter() {
             self.reachable.get_mut(&node).unwrap().insert(*sub_node);

             let sub_reachable = self.reachable
                 .entry(*sub_node)
-                .or_insert(HashSet::new())
+                .or_insert(HashSet::default())
                 .clone();

             for transitive in sub_reachable {
@@ -386,7 +386,7 @@ mod tests {
             nodes.as_ref().iter().cloned().map(Node).collect()
         }

-        let mut expected = HashMap::new();
+        let mut expected = HashMap::default();
         expected.insert(Node(1), nodes([3, 4, 5, 6, 7, 8]));
         expected.insert(Node(2), nodes([2]));
         expected.insert(Node(3), nodes([3, 4, 5, 6, 7, 8]));
diff --git a/src/ir/analysis/sizedness.rs b/src/ir/analysis/sizedness.rs
index 9c074355..12d679bb 100644
--- a/src/ir/analysis/sizedness.rs
+++ b/src/ir/analysis/sizedness.rs
@@ -5,10 +5,8 @@ use ir::context::{BindgenContext, TypeId};
 use ir::item::IsOpaque;
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
-use std::cmp;
-use std::collections::HashMap;
-use std::collections::hash_map::Entry;
-use std::ops;
+use std::{cmp, ops};
+use {HashMap, Entry};

 /// The result of the `Sizedness` analysis for an individual item.
 ///
@@ -194,7 +192,7 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
             })
             .collect();

-        let sized = HashMap::new();
+        let sized = HashMap::default();

         SizednessAnalysis {
             ctx,
diff --git a/src/ir/analysis/template_params.rs b/src/ir/analysis/template_params.rs
index b326e6b5..bd1b51a2 100644
--- a/src/ir/analysis/template_params.rs
+++ b/src/ir/analysis/template_params.rs
@@ -94,7 +94,7 @@ use ir::item::{Item, ItemSet};
 use ir::template::{TemplateInstantiation, TemplateParameters};
 use ir::traversal::{EdgeKind, Trace};
 use ir::ty::TypeKind;
-use std::collections::{HashMap, HashSet};
+use {HashMap, HashSet};

 /// An analysis that finds for each IR item its set of template parameters that
 /// it uses.
@@ -373,8 +373,8 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
     fn new(
         ctx: &'ctx BindgenContext,
     ) -> UsedTemplateParameters<'ctx> {
-        let mut used = HashMap::new();
-        let mut dependencies = HashMap::new();
+        let mut used = HashMap::default();
+        let mut dependencies = HashMap::default();

         let whitelisted_items: HashSet<_> =
             ctx.whitelisted_items().iter().cloned().collect();
diff --git a/src/ir/comp.rs b/src/ir/comp.rs
index 704c1776..fd4c8274 100644
--- a/src/ir/comp.rs
+++ b/src/ir/comp.rs
@@ -17,7 +17,7 @@ use peeking_take_while::PeekableExt;
 use std::cmp;
 use std::io;
 use std::mem;
-use std::collections::HashMap;
+use HashMap;

 /// The kind of compound type.
 #[derive(Debug, Copy, Clone, PartialEq)]
diff --git a/src/ir/context.rs b/src/ir/context.rs
index 17dd8512..f2399f51 100644
--- a/src/ir/context.rs
+++ b/src/ir/context.rs
@@ -27,10 +27,11 @@ use parse::ClangItemParser;
 use proc_macro2::{Ident, Span};
 use std::borrow::Cow;
 use std::cell::Cell;
-use std::collections::{HashMap, HashSet, hash_map};
 use std::collections::btree_map::{self, BTreeMap};
 use std::iter::IntoIterator;
 use std::mem;
+use std::collections::HashMap as StdHashMap;
+use {HashMap, HashSet, Entry};

 /// An identifier for some kind of IR item.
 #[derive(Debug, Copy, Clone, Eq, PartialOrd, Ord, Hash)]
@@ -348,10 +349,12 @@ pub struct BindgenContext {
     /// potentially break that assumption.
     currently_parsed_types: Vec<PartialType>,

-    /// A HashSet with all the already parsed macro names. This is done to avoid
+    /// A map with all the already parsed macro names. This is done to avoid
     /// hard errors while parsing duplicated macros, as well to allow macro
     /// expression parsing.
-    parsed_macros: HashMap<Vec<u8>, cexpr::expr::EvalResult>,
+    ///
+    /// This needs to be an std::HashMap because the cexpr API requires it.
+    parsed_macros: StdHashMap<Vec<u8>, cexpr::expr::EvalResult>,

     /// The active replacements collected from replaces="xxx" annotations.
     replacements: HashMap<Vec<String>, ItemId>,
@@ -1380,7 +1383,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
         } else {
             // If you aren't recursively whitelisting, then we can't really make
             // any sense of template parameter usage, and you're on your own.
-            let mut used_params = HashMap::new();
+            let mut used_params = HashMap::default();
             for &id in self.whitelisted_items() {
                 used_params.entry(id).or_insert(
                     id.self_template_params(self).into_iter().map(|p| p.into()).collect()
@@ -2079,7 +2082,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
     }

     /// Get the currently parsed macros.
-    pub fn parsed_macros(&self) -> &HashMap<Vec<u8>, cexpr::expr::EvalResult> {
+    pub fn parsed_macros(&self) -> &StdHashMap<Vec<u8>, cexpr::expr::EvalResult> {
         debug_assert!(!self.in_codegen_phase());
         &self.parsed_macros
     }
@@ -2105,7 +2108,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
     /// and implies that the original type is hidden.
     pub fn replace(&mut self, name: &[String], potential_ty: ItemId) {
         match self.replacements.entry(name.into()) {
-            hash_map::Entry::Vacant(entry) => {
+            Entry::Vacant(entry) => {
                 debug!(
                     "Defining replacement for {:?} as {:?}",
                     name,
@@ -2113,7 +2116,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
                 );
                 entry.insert(potential_ty);
             }
-            hash_map::Entry::Occupied(occupied) => {
+            Entry::Occupied(occupied) => {
                 warn!(
                     "Replacement for {:?} already defined as {:?}; \
                      ignoring duplicate replacement definition as {:?}",
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -23,6 +23,7 @@ extern crate cexpr;
 #[allow(unused_extern_crates)]
 extern crate cfg_if;
 extern crate clang_sys;
+extern crate fxhash;
 #[macro_use]
 extern crate lazy_static;
 extern crate peeking_take_while;
@@ -88,7 +89,6 @@ use regex_set::RegexSet;
 pub use codegen::EnumVariation;

 use std::borrow::Cow;
-use std::collections::HashMap;
 use std::fs::{File, OpenOptions};
 use std::io::{self, Write};
 use std::iter;
@@ -96,6 +96,11 @@ use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio};
 use std::sync::Arc;

+// Some convenient typedefs for a fast hash map and hash set.
+type HashMap<K, V> = ::fxhash::FxHashMap<K, V>;
+type HashSet<K> = ::fxhash::FxHashSet<K>;
+pub(crate) use ::std::collections::hash_map::Entry;
+
 fn args_are_cpp(clang_args: &[String]) -> bool {
     return clang_args
         .windows(2)
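The diff applies one pattern throughout: src/lib.rs aliases HashMap and HashSet to fxhash's FxHashMap and FxHashSet and re-exports Entry, each module imports those aliases instead of std::collections, and construction switches from new() to default() because HashMap::new()/HashSet::new() are only provided for the default RandomState hasher. The only exception is parsed_macros in ir/context.rs, which keeps a std HashMap because the cexpr API expects that concrete type. The following is a minimal, self-contained sketch of that pattern, not bindgen code; it assumes fxhash = "0.2" is declared as a dependency, and the Counter type and its methods are purely hypothetical.

extern crate fxhash;

// Crate-wide aliases, mirroring what the diff adds to src/lib.rs: every
// HashMap/HashSet in the crate now uses the fast FxHash hasher instead of
// std's default SipHash.
type HashMap<K, V> = ::fxhash::FxHashMap<K, V>;
type HashSet<K> = ::fxhash::FxHashSet<K>;
pub(crate) use ::std::collections::hash_map::Entry;

// Hypothetical type used only to exercise the aliases; not part of bindgen.
struct Counter {
    counts: HashMap<String, usize>,
    seen: HashSet<String>,
}

impl Counter {
    fn new() -> Self {
        Counter {
            // new() is only defined for maps using RandomState, so aliased
            // collections are built with default() instead; this is why the
            // diff rewrites HashMap::new()/HashSet::new() calls.
            counts: HashMap::default(),
            seen: HashSet::default(),
        }
    }

    fn record(&mut self, name: &str) {
        self.seen.insert(name.to_owned());
        // The Entry API is independent of the hasher, so call sites like
        // ir/context.rs only need the shorter import, not logic changes.
        match self.counts.entry(name.to_owned()) {
            Entry::Vacant(v) => {
                v.insert(1);
            }
            Entry::Occupied(mut o) => *o.get_mut() += 1,
        }
    }
}

fn main() {
    let mut c = Counter::new();
    c.record("ItemId");
    c.record("ItemId");
    assert_eq!(c.counts["ItemId"], 2);
    assert!(c.seen.contains("ItemId"));
}

FxHash is a fast, non-cryptographic hasher, so the switch trades SipHash's HashDoS resistance for speed; that seems a reasonable trade for a code generator whose keys are internal item IDs and identifiers rather than untrusted input.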