/rust/registry/src/index.crates.io-1949cf8c6b5b557f/indexmap-2.13.0/src/inner.rs
Line | Count | Source |
1 | | //! This is the core implementation that doesn't depend on the hasher at all. |
2 | | //! |
3 | | //! The methods of `Core` don't use any Hash properties of K. |
4 | | //! |
5 | | //! It's cleaner to separate them out, then the compiler checks that we are not |
6 | | //! using Hash at all in these methods. |
7 | | //! |
8 | | //! However, we should probably not let this show in the public API or docs. |
9 | | |
10 | | mod entry; |
11 | | mod extract; |
12 | | |
13 | | use alloc::vec::{self, Vec}; |
14 | | use core::mem; |
15 | | use core::ops::RangeBounds; |
16 | | use hashbrown::hash_table; |
17 | | |
18 | | use crate::util::simplify_range; |
19 | | use crate::{Bucket, Equivalent, HashValue, TryReserveError}; |
20 | | |
21 | | type Indices = hash_table::HashTable<usize>; |
22 | | type Entries<K, V> = Vec<Bucket<K, V>>; |
23 | | |
24 | | pub use entry::{OccupiedEntry, VacantEntry}; |
25 | | pub(crate) use extract::ExtractCore; |
26 | | |
27 | | /// Core of the map that does not depend on S |
28 | | #[cfg_attr(feature = "test_debug", derive(Debug))] |
29 | | pub(crate) struct Core<K, V> { |
30 | | /// indices mapping from the entry hash to its index. |
31 | | indices: Indices, |
32 | | /// entries is a dense vec maintaining entry order. |
33 | | entries: Entries<K, V>, |
34 | | } |
35 | | |
36 | | #[inline(always)] |
37 | 181k | fn get_hash<K, V>(entries: &[Bucket<K, V>]) -> impl Fn(&usize) -> u64 + use<'_, K, V> { |
38 | 86.4k | move |&i| entries[i].hash.get() indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>::{closure#0}Line | Count | Source | 38 | 3.58k | move |&i| entries[i].hash.get() |
indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>::{closure#0}Line | Count | Source | 38 | 10.7k | move |&i| entries[i].hash.get() |
Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>::{closure#0}indexmap::inner::get_hash::<naga::ir::Type, ()>::{closure#0}Line | Count | Source | 38 | 71.7k | move |&i| entries[i].hash.get() |
indexmap::inner::get_hash::<naga::front::wgsl::parse::ast::Dependency, ()>::{closure#0}Line | Count | Source | 38 | 344 | move |&i| entries[i].hash.get() |
Unexecuted instantiation: indexmap::inner::get_hash::<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<(u32, u32), ()>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<usize, alloc::vec::Vec<alloc::string::String>>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>::{closure#0}Unexecuted instantiation: indexmap::inner::get_hash::<u32, (usize, alloc::vec::Vec<i32>)>::{closure#0} |
39 | 181k | } indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String> Line | Count | Source | 37 | 3.59k | fn get_hash<K, V>(entries: &[Bucket<K, V>]) -> impl Fn(&usize) -> u64 + use<'_, K, V> { | 38 | | move |&i| entries[i].hash.get() | 39 | 3.59k | } |
indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)> Line | Count | Source | 37 | 5.39k | fn get_hash<K, V>(entries: &[Bucket<K, V>]) -> impl Fn(&usize) -> u64 + use<'_, K, V> { | 38 | | move |&i| entries[i].hash.get() | 39 | 5.39k | } |
Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>> Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>> Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>> Unexecuted instantiation: indexmap::inner::get_hash::<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>> Unexecuted instantiation: indexmap::inner::get_hash::<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)> Unexecuted instantiation: indexmap::inner::get_hash::<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>> indexmap::inner::get_hash::<naga::ir::Type, ()> Line | Count | Source | 37 | 57.2k | fn get_hash<K, V>(entries: &[Bucket<K, V>]) -> impl Fn(&usize) -> u64 + use<'_, K, V> { | 38 | | move |&i| entries[i].hash.get() | 39 | 57.2k | } |
indexmap::inner::get_hash::<naga::front::wgsl::parse::ast::Dependency, ()> Line | Count | Source | 37 | 114k | fn get_hash<K, V>(entries: &[Bucket<K, V>]) -> impl Fn(&usize) -> u64 + use<'_, K, V> { | 38 | | move |&i| entries[i].hash.get() | 39 | 114k | } |
Unexecuted instantiation: indexmap::inner::get_hash::<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>> indexmap::inner::get_hash::<(u32, u32), ()> Line | Count | Source | 37 | 4 | fn get_hash<K, V>(entries: &[Bucket<K, V>]) -> impl Fn(&usize) -> u64 + use<'_, K, V> { | 38 | | move |&i| entries[i].hash.get() | 39 | 4 | } |
Unexecuted instantiation: indexmap::inner::get_hash::<usize, alloc::vec::Vec<alloc::string::String>> indexmap::inner::get_hash::<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>> Line | Count | Source | 37 | 19 | fn get_hash<K, V>(entries: &[Bucket<K, V>]) -> impl Fn(&usize) -> u64 + use<'_, K, V> { | 38 | | move |&i| entries[i].hash.get() | 39 | 19 | } |
Unexecuted instantiation: indexmap::inner::get_hash::<u32, (usize, alloc::vec::Vec<i32>)> |
40 | | |
41 | | #[inline] |
42 | 181k | fn equivalent<'a, K, V, Q: ?Sized + Equivalent<K>>( |
43 | 181k | key: &'a Q, |
44 | 181k | entries: &'a [Bucket<K, V>], |
45 | 181k | ) -> impl Fn(&usize) -> bool + use<'a, K, V, Q> { |
46 | 37.2k | move |&i| Q::equivalent(key, &entries[i].key) indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String, naga::arena::handle::Handle<naga::ir::Expression>>::{closure#0}Line | Count | Source | 46 | 16 | move |&i| Q::equivalent(key, &entries[i].key) |
indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span), naga::arena::handle::Handle<naga::ir::Expression>>::{closure#0}Line | Count | Source | 46 | 1.71k | move |&i| Q::equivalent(key, &entries[i].key) |
Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::GlobalVariable>>::{closure#0}Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::Type>>::{closure#0}Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::Constant>>::{closure#0}Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::Function>>::{closure#0}Unexecuted instantiation: indexmap::inner::equivalent::<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span), naga::diagnostic_filter::FilterableTriggeringRule>::{closure#0}Unexecuted instantiation: indexmap::inner::equivalent::<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>, naga::ir::PredeclaredType>::{closure#0}indexmap::inner::equivalent::<naga::ir::Type, (), naga::ir::Type>::{closure#0}Line | Count | Source | 46 | 14.8k | move |&i| Q::equivalent(key, &entries[i].key) |
indexmap::inner::equivalent::<naga::front::wgsl::parse::ast::Dependency, (), naga::front::wgsl::parse::ast::Dependency>::{closure#0}Line | Count | Source | 46 | 20.6k | move |&i| Q::equivalent(key, &entries[i].key) |
Unexecuted instantiation: indexmap::inner::equivalent::<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>, (naga::arena::handle::Handle<naga::ir::Type>, usize)>::{closure#0}Unexecuted instantiation: indexmap::inner::equivalent::<(u32, u32), (), (u32, u32)>::{closure#0}Unexecuted instantiation: indexmap::inner::equivalent::<usize, alloc::vec::Vec<alloc::string::String>, usize>::{closure#0}indexmap::inner::equivalent::<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>, u32>::{closure#0}Line | Count | Source | 46 | 4 | move |&i| Q::equivalent(key, &entries[i].key) |
Unexecuted instantiation: indexmap::inner::equivalent::<u32, (usize, alloc::vec::Vec<i32>), u32>::{closure#0} |
47 | 181k | } indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String, naga::arena::handle::Handle<naga::ir::Expression>> Line | Count | Source | 42 | 3.59k | fn equivalent<'a, K, V, Q: ?Sized + Equivalent<K>>( | 43 | 3.59k | key: &'a Q, | 44 | 3.59k | entries: &'a [Bucket<K, V>], | 45 | 3.59k | ) -> impl Fn(&usize) -> bool + use<'a, K, V, Q> { | 46 | | move |&i| Q::equivalent(key, &entries[i].key) | 47 | 3.59k | } |
indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span), naga::arena::handle::Handle<naga::ir::Expression>> Line | Count | Source | 42 | 5.39k | fn equivalent<'a, K, V, Q: ?Sized + Equivalent<K>>( | 43 | 5.39k | key: &'a Q, | 44 | 5.39k | entries: &'a [Bucket<K, V>], | 45 | 5.39k | ) -> impl Fn(&usize) -> bool + use<'a, K, V, Q> { | 46 | | move |&i| Q::equivalent(key, &entries[i].key) | 47 | 5.39k | } |
Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::GlobalVariable>> Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::Type>> Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::Constant>> Unexecuted instantiation: indexmap::inner::equivalent::<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>, naga::arena::handle::Handle<naga::ir::Function>> Unexecuted instantiation: indexmap::inner::equivalent::<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span), naga::diagnostic_filter::FilterableTriggeringRule> Unexecuted instantiation: indexmap::inner::equivalent::<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>, naga::ir::PredeclaredType> indexmap::inner::equivalent::<naga::ir::Type, (), naga::ir::Type> Line | Count | Source | 42 | 57.2k | fn equivalent<'a, K, V, Q: ?Sized + Equivalent<K>>( | 43 | 57.2k | key: &'a Q, | 44 | 57.2k | entries: &'a [Bucket<K, V>], | 45 | 57.2k | ) -> impl Fn(&usize) -> bool + use<'a, K, V, Q> { | 46 | | move |&i| Q::equivalent(key, &entries[i].key) | 47 | 57.2k | } |
indexmap::inner::equivalent::<naga::front::wgsl::parse::ast::Dependency, (), naga::front::wgsl::parse::ast::Dependency> Line | Count | Source | 42 | 114k | fn equivalent<'a, K, V, Q: ?Sized + Equivalent<K>>( | 43 | 114k | key: &'a Q, | 44 | 114k | entries: &'a [Bucket<K, V>], | 45 | 114k | ) -> impl Fn(&usize) -> bool + use<'a, K, V, Q> { | 46 | | move |&i| Q::equivalent(key, &entries[i].key) | 47 | 114k | } |
Unexecuted instantiation: indexmap::inner::equivalent::<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>, (naga::arena::handle::Handle<naga::ir::Type>, usize)> indexmap::inner::equivalent::<(u32, u32), (), (u32, u32)> Line | Count | Source | 42 | 4 | fn equivalent<'a, K, V, Q: ?Sized + Equivalent<K>>( | 43 | 4 | key: &'a Q, | 44 | 4 | entries: &'a [Bucket<K, V>], | 45 | 4 | ) -> impl Fn(&usize) -> bool + use<'a, K, V, Q> { | 46 | | move |&i| Q::equivalent(key, &entries[i].key) | 47 | 4 | } |
Unexecuted instantiation: indexmap::inner::equivalent::<usize, alloc::vec::Vec<alloc::string::String>, usize> indexmap::inner::equivalent::<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>, u32> Line | Count | Source | 42 | 23 | fn equivalent<'a, K, V, Q: ?Sized + Equivalent<K>>( | 43 | 23 | key: &'a Q, | 44 | 23 | entries: &'a [Bucket<K, V>], | 45 | 23 | ) -> impl Fn(&usize) -> bool + use<'a, K, V, Q> { | 46 | | move |&i| Q::equivalent(key, &entries[i].key) | 47 | 23 | } |
Unexecuted instantiation: indexmap::inner::equivalent::<u32, (usize, alloc::vec::Vec<i32>), u32> |
48 | | |
49 | | #[inline] |
50 | 0 | fn erase_index(table: &mut Indices, hash: HashValue, index: usize) { |
51 | 0 | if let Ok(entry) = table.find_entry(hash.get(), move |&i| i == index) { |
52 | 0 | entry.remove(); |
53 | 0 | } else if cfg!(debug_assertions) { |
54 | 0 | panic!("index not found"); |
55 | 0 | } |
56 | 0 | } |
57 | | |
58 | | #[inline] |
59 | 0 | fn update_index(table: &mut Indices, hash: HashValue, old: usize, new: usize) { |
60 | 0 | let index = table |
61 | 0 | .find_mut(hash.get(), move |&i| i == old) |
62 | 0 | .expect("index not found"); |
63 | 0 | *index = new; |
64 | 0 | } |
65 | | |
66 | | /// Inserts many entries into the indices table without reallocating, |
67 | | /// and without regard for duplication. |
68 | | /// |
69 | | /// ***Panics*** if there is not sufficient capacity already. |
70 | 16 | fn insert_bulk_no_grow<K, V>(indices: &mut Indices, entries: &[Bucket<K, V>]) { |
71 | 16 | assert!(indices.capacity() - indices.len() >= entries.len()); |
72 | 16 | for entry in entries { |
73 | 0 | indices.insert_unique(entry.hash.get(), indices.len(), |_| unreachable!()); Unexecuted instantiation: indexmap::inner::insert_bulk_no_grow::<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>::{closure#0}Unexecuted instantiation: indexmap::inner::insert_bulk_no_grow::<naga::ir::Type, ()>::{closure#0} |
74 | | } |
75 | 16 | } indexmap::inner::insert_bulk_no_grow::<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String> Line | Count | Source | 70 | 2 | fn insert_bulk_no_grow<K, V>(indices: &mut Indices, entries: &[Bucket<K, V>]) { | 71 | 2 | assert!(indices.capacity() - indices.len() >= entries.len()); | 72 | 2 | for entry in entries { | 73 | 0 | indices.insert_unique(entry.hash.get(), indices.len(), |_| unreachable!()); | 74 | | } | 75 | 2 | } |
indexmap::inner::insert_bulk_no_grow::<naga::ir::Type, ()> Line | Count | Source | 70 | 14 | fn insert_bulk_no_grow<K, V>(indices: &mut Indices, entries: &[Bucket<K, V>]) { | 71 | 14 | assert!(indices.capacity() - indices.len() >= entries.len()); | 72 | 14 | for entry in entries { | 73 | 0 | indices.insert_unique(entry.hash.get(), indices.len(), |_| unreachable!()); | 74 | | } | 75 | 14 | } |
|
76 | | |
77 | | impl<K, V> Clone for Core<K, V> |
78 | | where |
79 | | K: Clone, |
80 | | V: Clone, |
81 | | { |
82 | | fn clone(&self) -> Self { |
83 | | let mut new = Self::new(); |
84 | | new.clone_from(self); |
85 | | new |
86 | | } |
87 | | |
88 | | fn clone_from(&mut self, other: &Self) { |
89 | | self.indices.clone_from(&other.indices); |
90 | | if self.entries.capacity() < other.entries.len() { |
91 | | // If we must resize, match the indices capacity. |
92 | | let additional = other.entries.len() - self.entries.len(); |
93 | | self.reserve_entries(additional); |
94 | | } |
95 | | self.entries.clone_from(&other.entries); |
96 | | } |
97 | | } |
98 | | |
99 | | impl<K, V> Core<K, V> { |
100 | | /// The maximum capacity before the `entries` allocation would exceed `isize::MAX`. |
101 | | const MAX_ENTRIES_CAPACITY: usize = (isize::MAX as usize) / size_of::<Bucket<K, V>>(); |
102 | | |
103 | | #[inline] |
104 | 958k | pub(crate) const fn new() -> Self { |
105 | 958k | Core { |
106 | 958k | indices: Indices::new(), |
107 | 958k | entries: Vec::new(), |
108 | 958k | } |
109 | 958k | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::new Line | Count | Source | 104 | 66 | pub(crate) const fn new() -> Self { | 105 | 66 | Core { | 106 | 66 | indices: Indices::new(), | 107 | 66 | entries: Vec::new(), | 108 | 66 | } | 109 | 66 | } |
<indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>>::new Line | Count | Source | 104 | 31 | pub(crate) const fn new() -> Self { | 105 | 31 | Core { | 106 | 31 | indices: Indices::new(), | 107 | 31 | entries: Vec::new(), | 108 | 31 | } | 109 | 31 | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::new Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::new Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::new Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::new <indexmap::inner::Core<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>>::new Line | Count | Source | 104 | 478k | pub(crate) const fn new() -> Self { | 105 | 478k | Core { | 106 | 478k | indices: Indices::new(), | 107 | 478k | entries: Vec::new(), | 108 | 478k | } | 109 | 478k | } |
<indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::new Line | Count | Source | 104 | 54 | pub(crate) const fn new() -> Self { | 105 | 54 | Core { | 106 | 54 | indices: Indices::new(), | 107 | 54 | entries: Vec::new(), | 108 | 54 | } | 109 | 54 | } |
<indexmap::inner::Core<naga::ir::Type, ()>>::new Line | Count | Source | 104 | 62 | pub(crate) const fn new() -> Self { | 105 | 62 | Core { | 106 | 62 | indices: Indices::new(), | 107 | 62 | entries: Vec::new(), | 108 | 62 | } | 109 | 62 | } |
<indexmap::inner::Core<naga::front::wgsl::parse::ast::Dependency, ()>>::new Line | Count | Source | 104 | 478k | pub(crate) const fn new() -> Self { | 105 | 478k | Core { | 106 | 478k | indices: Indices::new(), | 107 | 478k | entries: Vec::new(), | 108 | 478k | } | 109 | 478k | } |
Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::new <indexmap::inner::Core<(u32, u32), ()>>::new Line | Count | Source | 104 | 24 | pub(crate) const fn new() -> Self { | 105 | 24 | Core { | 106 | 24 | indices: Indices::new(), | 107 | 24 | entries: Vec::new(), | 108 | 24 | } | 109 | 24 | } |
Unexecuted instantiation: <indexmap::inner::Core<usize, alloc::vec::Vec<alloc::string::String>>>::new <indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::new Line | Count | Source | 104 | 24 | pub(crate) const fn new() -> Self { | 105 | 24 | Core { | 106 | 24 | indices: Indices::new(), | 107 | 24 | entries: Vec::new(), | 108 | 24 | } | 109 | 24 | } |
<indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::new Line | Count | Source | 104 | 24 | pub(crate) const fn new() -> Self { | 105 | 24 | Core { | 106 | 24 | indices: Indices::new(), | 107 | 24 | entries: Vec::new(), | 108 | 24 | } | 109 | 24 | } |
|
110 | | |
111 | | #[inline] |
112 | 1 | pub(crate) fn with_capacity(n: usize) -> Self { |
113 | 1 | Core { |
114 | 1 | indices: Indices::with_capacity(n), |
115 | 1 | entries: Vec::with_capacity(n), |
116 | 1 | } |
117 | 1 | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::with_capacity Line | Count | Source | 112 | 1 | pub(crate) fn with_capacity(n: usize) -> Self { | 113 | 1 | Core { | 114 | 1 | indices: Indices::with_capacity(n), | 115 | 1 | entries: Vec::with_capacity(n), | 116 | 1 | } | 117 | 1 | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::ir::Type, ()>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<naga::front::wgsl::parse::ast::Dependency, ()>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<(u32, u32), ()>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<usize, alloc::vec::Vec<alloc::string::String>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::with_capacity Unexecuted instantiation: <indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::with_capacity |
118 | | |
119 | | #[inline] |
120 | 315k | pub(crate) fn into_entries(self) -> Entries<K, V> { |
121 | 315k | self.entries |
122 | 315k | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>>::into_entries Line | Count | Source | 120 | 9 | pub(crate) fn into_entries(self) -> Entries<K, V> { | 121 | 9 | self.entries | 122 | 9 | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::into_entries Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::into_entries Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::into_entries Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::into_entries <indexmap::inner::Core<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>>::into_entries Line | Count | Source | 120 | 315k | pub(crate) fn into_entries(self) -> Entries<K, V> { | 121 | 315k | self.entries | 122 | 315k | } |
Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::into_entries |
123 | | |
124 | | #[inline] |
125 | 231k | pub(crate) fn as_entries(&self) -> &[Bucket<K, V>] { |
126 | 231k | &self.entries |
127 | 231k | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::as_entries Line | Count | Source | 125 | 1 | pub(crate) fn as_entries(&self) -> &[Bucket<K, V>] { | 126 | 1 | &self.entries | 127 | 1 | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>>::as_entries Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::as_entries Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::as_entries Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::as_entries Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::as_entries Unexecuted instantiation: <indexmap::inner::Core<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>>::as_entries <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::as_entries Line | Count | Source | 125 | 8 | pub(crate) fn as_entries(&self) -> &[Bucket<K, V>] { | 126 | 8 | &self.entries | 127 | 8 | } |
<indexmap::inner::Core<naga::ir::Type, ()>>::as_entries Line | Count | Source | 125 | 211k | pub(crate) fn as_entries(&self) -> &[Bucket<K, V>] { | 126 | 211k | &self.entries | 127 | 211k | } |
<indexmap::inner::Core<naga::front::wgsl::parse::ast::Dependency, ()>>::as_entries Line | Count | Source | 125 | 19.3k | pub(crate) fn as_entries(&self) -> &[Bucket<K, V>] { | 126 | 19.3k | &self.entries | 127 | 19.3k | } |
Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::as_entries Unexecuted instantiation: <indexmap::inner::Core<usize, alloc::vec::Vec<alloc::string::String>>>::as_entries <indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::as_entries Line | Count | Source | 125 | 1 | pub(crate) fn as_entries(&self) -> &[Bucket<K, V>] { | 126 | 1 | &self.entries | 127 | 1 | } |
Unexecuted instantiation: <indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::as_entries |
128 | | |
129 | | #[inline] |
130 | 8 | pub(crate) fn as_entries_mut(&mut self) -> &mut [Bucket<K, V>] { |
131 | 8 | &mut self.entries |
132 | 8 | } <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::as_entries_mut Line | Count | Source | 130 | 8 | pub(crate) fn as_entries_mut(&mut self) -> &mut [Bucket<K, V>] { | 131 | 8 | &mut self.entries | 132 | 8 | } |
Unexecuted instantiation: <indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::as_entries_mut |
133 | | |
134 | | pub(crate) fn with_entries<F>(&mut self, f: F) |
135 | | where |
136 | | F: FnOnce(&mut [Bucket<K, V>]), |
137 | | { |
138 | | f(&mut self.entries); |
139 | | self.rebuild_hash_table(); |
140 | | } |
141 | | |
142 | | #[inline] |
143 | 163k | pub(crate) fn len(&self) -> usize { |
144 | 163k | debug_assert_eq!(self.entries.len(), self.indices.len()); |
145 | 163k | self.indices.len() |
146 | 163k | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::len Line | Count | Source | 143 | 11 | pub(crate) fn len(&self) -> usize { | 144 | 11 | debug_assert_eq!(self.entries.len(), self.indices.len()); | 145 | 11 | self.indices.len() | 146 | 11 | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::len Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::len Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::len Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::len <indexmap::inner::Core<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>>::len Line | Count | Source | 143 | 163k | pub(crate) fn len(&self) -> usize { | 144 | 163k | debug_assert_eq!(self.entries.len(), self.indices.len()); | 145 | 163k | self.indices.len() | 146 | 163k | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::len <indexmap::inner::Core<naga::ir::Type, ()>>::len Line | Count | Source | 143 | 16 | pub(crate) fn len(&self) -> usize { | 144 | 16 | debug_assert_eq!(self.entries.len(), self.indices.len()); | 145 | 16 | self.indices.len() | 146 | 16 | } |
Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::len Unexecuted instantiation: <indexmap::inner::Core<usize, alloc::vec::Vec<alloc::string::String>>>::len <indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::len Line | Count | Source | 143 | 2 | pub(crate) fn len(&self) -> usize { | 144 | 2 | debug_assert_eq!(self.entries.len(), self.indices.len()); | 145 | 2 | self.indices.len() | 146 | 2 | } |
|
147 | | |
148 | | #[inline] |
149 | | pub(crate) fn capacity(&self) -> usize { |
150 | | Ord::min(self.indices.capacity(), self.entries.capacity()) |
151 | | } |
152 | | |
153 | 44 | pub(crate) fn clear(&mut self) { |
154 | 44 | self.indices.clear(); |
155 | 44 | self.entries.clear(); |
156 | 44 | } <indexmap::inner::Core<(u32, u32), ()>>::clear Line | Count | Source | 153 | 22 | pub(crate) fn clear(&mut self) { | 154 | 22 | self.indices.clear(); | 155 | 22 | self.entries.clear(); | 156 | 22 | } |
<indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::clear Line | Count | Source | 153 | 22 | pub(crate) fn clear(&mut self) { | 154 | 22 | self.indices.clear(); | 155 | 22 | self.entries.clear(); | 156 | 22 | } |
Unexecuted instantiation: <indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::clear |
157 | | |
158 | | pub(crate) fn truncate(&mut self, len: usize) { |
159 | | if len < self.len() { |
160 | | self.erase_indices(len, self.entries.len()); |
161 | | self.entries.truncate(len); |
162 | | } |
163 | | } |
164 | | |
165 | | #[track_caller] |
166 | 9 | pub(crate) fn drain<R>(&mut self, range: R) -> vec::Drain<'_, Bucket<K, V>> |
167 | 9 | where |
168 | 9 | R: RangeBounds<usize>, |
169 | | { |
170 | 9 | let range = simplify_range(range, self.entries.len()); |
171 | 9 | self.erase_indices(range.start, range.end); |
172 | 9 | self.entries.drain(range) |
173 | 9 | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::drain::<core::ops::range::RangeFull> Line | Count | Source | 166 | 1 | pub(crate) fn drain<R>(&mut self, range: R) -> vec::Drain<'_, Bucket<K, V>> | 167 | 1 | where | 168 | 1 | R: RangeBounds<usize>, | 169 | | { | 170 | 1 | let range = simplify_range(range, self.entries.len()); | 171 | 1 | self.erase_indices(range.start, range.end); | 172 | 1 | self.entries.drain(range) | 173 | 1 | } |
<indexmap::inner::Core<naga::ir::Type, ()>>::drain::<core::ops::range::RangeFull> Line | Count | Source | 166 | 8 | pub(crate) fn drain<R>(&mut self, range: R) -> vec::Drain<'_, Bucket<K, V>> | 167 | 8 | where | 168 | 8 | R: RangeBounds<usize>, | 169 | | { | 170 | 8 | let range = simplify_range(range, self.entries.len()); | 171 | 8 | self.erase_indices(range.start, range.end); | 172 | 8 | self.entries.drain(range) | 173 | 8 | } |
|
174 | | |
175 | | #[cfg(feature = "rayon")] |
176 | | pub(crate) fn par_drain<R>(&mut self, range: R) -> rayon::vec::Drain<'_, Bucket<K, V>> |
177 | | where |
178 | | K: Send, |
179 | | V: Send, |
180 | | R: RangeBounds<usize>, |
181 | | { |
182 | | use rayon::iter::ParallelDrainRange; |
183 | | let range = simplify_range(range, self.entries.len()); |
184 | | self.erase_indices(range.start, range.end); |
185 | | self.entries.par_drain(range) |
186 | | } |
187 | | |
188 | | #[track_caller] |
189 | | pub(crate) fn split_off(&mut self, at: usize) -> Self { |
190 | | let len = self.entries.len(); |
191 | | assert!( |
192 | | at <= len, |
193 | | "index out of bounds: the len is {len} but the index is {at}. Expected index <= len" |
194 | | ); |
195 | | |
196 | | self.erase_indices(at, self.entries.len()); |
197 | | let entries = self.entries.split_off(at); |
198 | | |
199 | | let mut indices = Indices::with_capacity(entries.len()); |
200 | | insert_bulk_no_grow(&mut indices, &entries); |
201 | | Self { indices, entries } |
202 | | } |
203 | | |
204 | | #[track_caller] |
205 | | pub(crate) fn split_splice<R>(&mut self, range: R) -> (Self, vec::IntoIter<Bucket<K, V>>) |
206 | | where |
207 | | R: RangeBounds<usize>, |
208 | | { |
209 | | let range = simplify_range(range, self.len()); |
210 | | self.erase_indices(range.start, self.entries.len()); |
211 | | let entries = self.entries.split_off(range.end); |
212 | | let drained = self.entries.split_off(range.start); |
213 | | |
214 | | let mut indices = Indices::with_capacity(entries.len()); |
215 | | insert_bulk_no_grow(&mut indices, &entries); |
216 | | (Self { indices, entries }, drained.into_iter()) |
217 | | } |
218 | | |
219 | | /// Append from another map without checking whether items already exist. |
220 | | pub(crate) fn append_unchecked(&mut self, other: &mut Self) { |
221 | | self.reserve(other.len()); |
222 | | insert_bulk_no_grow(&mut self.indices, &other.entries); |
223 | | self.entries.append(&mut other.entries); |
224 | | other.indices.clear(); |
225 | | } |
226 | | |
227 | | /// Reserve capacity for `additional` more key-value pairs. |
228 | 9 | pub(crate) fn reserve(&mut self, additional: usize) { |
229 | 9 | self.indices.reserve(additional, get_hash(&self.entries)); |
230 | | // Only grow entries if necessary, since we also round up capacity. |
231 | 9 | if additional > self.entries.capacity() - self.entries.len() { |
232 | 0 | self.reserve_entries(additional); |
233 | 9 | } |
234 | 9 | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::reserve Line | Count | Source | 228 | 9 | pub(crate) fn reserve(&mut self, additional: usize) { | 229 | 9 | self.indices.reserve(additional, get_hash(&self.entries)); | 230 | | // Only grow entries if necessary, since we also round up capacity. | 231 | 9 | if additional > self.entries.capacity() - self.entries.len() { | 232 | 0 | self.reserve_entries(additional); | 233 | 9 | } | 234 | 9 | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::reserve Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::reserve Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::reserve Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::reserve Unexecuted instantiation: <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::reserve Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::reserve Unexecuted instantiation: <indexmap::inner::Core<usize, alloc::vec::Vec<alloc::string::String>>>::reserve |
235 | | |
236 | | /// Reserve capacity for `additional` more key-value pairs, without over-allocating. |
237 | | pub(crate) fn reserve_exact(&mut self, additional: usize) { |
238 | | self.indices.reserve(additional, get_hash(&self.entries)); |
239 | | self.entries.reserve_exact(additional); |
240 | | } |
241 | | |
242 | | /// Try to reserve capacity for `additional` more key-value pairs. |
243 | | pub(crate) fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { |
244 | | self.indices |
245 | | .try_reserve(additional, get_hash(&self.entries)) |
246 | | .map_err(TryReserveError::from_hashbrown)?; |
247 | | // Only grow entries if necessary, since we also round up capacity. |
248 | | if additional > self.entries.capacity() - self.entries.len() { |
249 | | self.try_reserve_entries(additional) |
250 | | } else { |
251 | | Ok(()) |
252 | | } |
253 | | } |
254 | | |
255 | | /// Try to reserve entries capacity, rounded up to match the indices |
256 | | fn try_reserve_entries(&mut self, additional: usize) -> Result<(), TryReserveError> { |
257 | | // Use a soft-limit on the maximum capacity, but if the caller explicitly |
258 | | // requested more, do it and let them have the resulting error. |
259 | | let new_capacity = Ord::min(self.indices.capacity(), Self::MAX_ENTRIES_CAPACITY); |
260 | | let try_add = new_capacity - self.entries.len(); |
261 | | if try_add > additional && self.entries.try_reserve_exact(try_add).is_ok() { |
262 | | return Ok(()); |
263 | | } |
264 | | self.entries |
265 | | .try_reserve_exact(additional) |
266 | | .map_err(TryReserveError::from_alloc) |
267 | | } |
268 | | |
269 | | /// Try to reserve capacity for `additional` more key-value pairs, without over-allocating. |
270 | | pub(crate) fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> { |
271 | | self.indices |
272 | | .try_reserve(additional, get_hash(&self.entries)) |
273 | | .map_err(TryReserveError::from_hashbrown)?; |
274 | | self.entries |
275 | | .try_reserve_exact(additional) |
276 | | .map_err(TryReserveError::from_alloc) |
277 | | } |
278 | | |
279 | | /// Shrink the capacity of the map with a lower bound |
280 | | pub(crate) fn shrink_to(&mut self, min_capacity: usize) { |
281 | | self.indices |
282 | | .shrink_to(min_capacity, get_hash(&self.entries)); |
283 | | self.entries.shrink_to(min_capacity); |
284 | | } |
285 | | |
286 | | /// Remove the last key-value pair |
287 | | pub(crate) fn pop(&mut self) -> Option<(K, V)> { |
288 | | if let Some(entry) = self.entries.pop() { |
289 | | let last = self.entries.len(); |
290 | | erase_index(&mut self.indices, entry.hash, last); |
291 | | Some((entry.key, entry.value)) |
292 | | } else { |
293 | | None |
294 | | } |
295 | | } |
296 | | |
297 | | /// Return the index in `entries` where an equivalent key can be found |
298 | 0 | pub(crate) fn get_index_of<Q>(&self, hash: HashValue, key: &Q) -> Option<usize> |
299 | 0 | where |
300 | 0 | Q: ?Sized + Equivalent<K>, |
301 | | { |
302 | 0 | let eq = equivalent(key, &self.entries); |
303 | 0 | self.indices.find(hash.get(), eq).copied() |
304 | 0 | } Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>>::get_index_of::<naga::arena::handle::Handle<naga::ir::Expression>> Unexecuted instantiation: <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::get_index_of::<naga::ir::PredeclaredType> Unexecuted instantiation: <indexmap::inner::Core<naga::ir::Type, ()>>::get_index_of::<naga::ir::Type> Unexecuted instantiation: <indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::get_index_of::<u32> Unexecuted instantiation: <indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::get_index_of::<u32> |
305 | | |
306 | | /// Return the index in `entries` where an equivalent key can be found |
307 | | pub(crate) fn get_index_of_raw<F>(&self, hash: HashValue, mut is_match: F) -> Option<usize> |
308 | | where |
309 | | F: FnMut(&K) -> bool, |
310 | | { |
311 | | let eq = move |&i: &usize| is_match(&self.entries[i].key); |
312 | | self.indices.find(hash.get(), eq).copied() |
313 | | } |
314 | | |
315 | | /// Append a key-value pair to `entries`, |
316 | | /// *without* checking whether it already exists. |
317 | 149k | fn push_entry(&mut self, hash: HashValue, key: K, value: V) { |
318 | 149k | if self.entries.len() == self.entries.capacity() { |
319 | 94.2k | // Reserve our own capacity synced to the indices, |
320 | 94.2k | // rather than letting `Vec::push` just double it. |
321 | 94.2k | self.reserve_entries(1); |
322 | 94.2k | } |
323 | 149k | self.entries.push(Bucket { hash, key, value }); |
324 | 149k | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::push_entry Line | Count | Source | 317 | 3.59k | fn push_entry(&mut self, hash: HashValue, key: K, value: V) { | 318 | 3.59k | if self.entries.len() == self.entries.capacity() { | 319 | 11 | // Reserve our own capacity synced to the indices, | 320 | 11 | // rather than letting `Vec::push` just double it. | 321 | 11 | self.reserve_entries(1); | 322 | 3.57k | } | 323 | 3.59k | self.entries.push(Bucket { hash, key, value }); | 324 | 3.59k | } |
<indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>>::push_entry Line | Count | Source | 317 | 5.39k | fn push_entry(&mut self, hash: HashValue, key: K, value: V) { | 318 | 5.39k | if self.entries.len() == self.entries.capacity() { | 319 | 23 | // Reserve our own capacity synced to the indices, | 320 | 23 | // rather than letting `Vec::push` just double it. | 321 | 23 | self.reserve_entries(1); | 322 | 5.36k | } | 323 | 5.39k | self.entries.push(Bucket { hash, key, value }); | 324 | 5.39k | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::push_entry Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::push_entry Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::push_entry Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::push_entry Unexecuted instantiation: <indexmap::inner::Core<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>>::push_entry Unexecuted instantiation: <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::push_entry <indexmap::inner::Core<naga::ir::Type, ()>>::push_entry Line | Count | Source | 317 | 46.0k | fn push_entry(&mut self, hash: HashValue, key: K, value: V) { | 318 | 46.0k | if self.entries.len() == self.entries.capacity() { | 319 | 107 | // Reserve our own capacity synced to the indices, | 320 | 107 | // rather than letting `Vec::push` just double it. | 321 | 107 | self.reserve_entries(1); | 322 | 45.8k | } | 323 | 46.0k | self.entries.push(Bucket { hash, key, value }); | 324 | 46.0k | } |
<indexmap::inner::Core<naga::front::wgsl::parse::ast::Dependency, ()>>::push_entry Line | Count | Source | 317 | 94.3k | fn push_entry(&mut self, hash: HashValue, key: K, value: V) { | 318 | 94.3k | if self.entries.len() == self.entries.capacity() { | 319 | 94.0k | // Reserve our own capacity synced to the indices, | 320 | 94.0k | // rather than letting `Vec::push` just double it. | 321 | 94.0k | self.reserve_entries(1); | 322 | 94.0k | } | 323 | 94.3k | self.entries.push(Bucket { hash, key, value }); | 324 | 94.3k | } |
Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::push_entry <indexmap::inner::Core<(u32, u32), ()>>::push_entry Line | Count | Source | 317 | 4 | fn push_entry(&mut self, hash: HashValue, key: K, value: V) { | 318 | 4 | if self.entries.len() == self.entries.capacity() { | 319 | 4 | // Reserve our own capacity synced to the indices, | 320 | 4 | // rather than letting `Vec::push` just double it. | 321 | 4 | self.reserve_entries(1); | 322 | 4 | } | 323 | 4 | self.entries.push(Bucket { hash, key, value }); | 324 | 4 | } |
Unexecuted instantiation: <indexmap::inner::Core<usize, alloc::vec::Vec<alloc::string::String>>>::push_entry <indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::push_entry Line | Count | Source | 317 | 19 | fn push_entry(&mut self, hash: HashValue, key: K, value: V) { | 318 | 19 | if self.entries.len() == self.entries.capacity() { | 319 | 15 | // Reserve our own capacity synced to the indices, | 320 | 15 | // rather than letting `Vec::push` just double it. | 321 | 15 | self.reserve_entries(1); | 322 | 15 | } | 323 | 19 | self.entries.push(Bucket { hash, key, value }); | 324 | 19 | } |
Unexecuted instantiation: <indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::push_entry |
325 | | |
326 | 181k | pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option<V>) |
327 | 181k | where |
328 | 181k | K: Eq, |
329 | | { |
330 | 181k | let eq = equivalent(&key, &self.entries); |
331 | 181k | let hasher = get_hash(&self.entries); |
332 | 181k | match self.indices.entry(hash.get(), eq, hasher) { |
333 | 31.9k | hash_table::Entry::Occupied(entry) => { |
334 | 31.9k | let i = *entry.get(); |
335 | 31.9k | (i, Some(mem::replace(&mut self.entries[i].value, value))) |
336 | | } |
337 | 149k | hash_table::Entry::Vacant(entry) => { |
338 | 149k | let i = self.entries.len(); |
339 | 149k | entry.insert(i); |
340 | 149k | self.push_entry(hash, key, value); |
341 | 149k | debug_assert_eq!(self.indices.len(), self.entries.len()); |
342 | 149k | (i, None) |
343 | | } |
344 | | } |
345 | 181k | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::insert_full Line | Count | Source | 326 | 3.59k | pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option<V>) | 327 | 3.59k | where | 328 | 3.59k | K: Eq, | 329 | | { | 330 | 3.59k | let eq = equivalent(&key, &self.entries); | 331 | 3.59k | let hasher = get_hash(&self.entries); | 332 | 3.59k | match self.indices.entry(hash.get(), eq, hasher) { | 333 | 0 | hash_table::Entry::Occupied(entry) => { | 334 | 0 | let i = *entry.get(); | 335 | 0 | (i, Some(mem::replace(&mut self.entries[i].value, value))) | 336 | | } | 337 | 3.59k | hash_table::Entry::Vacant(entry) => { | 338 | 3.59k | let i = self.entries.len(); | 339 | 3.59k | entry.insert(i); | 340 | 3.59k | self.push_entry(hash, key, value); | 341 | 3.59k | debug_assert_eq!(self.indices.len(), self.entries.len()); | 342 | 3.59k | (i, None) | 343 | | } | 344 | | } | 345 | 3.59k | } |
<indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, (alloc::string::String, naga::span::Span)>>::insert_full Line | Count | Source | 326 | 5.39k | pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option<V>) | 327 | 5.39k | where | 328 | 5.39k | K: Eq, | 329 | | { | 330 | 5.39k | let eq = equivalent(&key, &self.entries); | 331 | 5.39k | let hasher = get_hash(&self.entries); | 332 | 5.39k | match self.indices.entry(hash.get(), eq, hasher) { | 333 | 0 | hash_table::Entry::Occupied(entry) => { | 334 | 0 | let i = *entry.get(); | 335 | 0 | (i, Some(mem::replace(&mut self.entries[i].value, value))) | 336 | | } | 337 | 5.39k | hash_table::Entry::Vacant(entry) => { | 338 | 5.39k | let i = self.entries.len(); | 339 | 5.39k | entry.insert(i); | 340 | 5.39k | self.push_entry(hash, key, value); | 341 | 5.39k | debug_assert_eq!(self.indices.len(), self.entries.len()); | 342 | 5.39k | (i, None) | 343 | | } | 344 | | } | 345 | 5.39k | } |
Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::GlobalVariable>, alloc::vec::Vec<alloc::string::String>>>::insert_full Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Type>, alloc::vec::Vec<alloc::string::String>>>::insert_full Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Constant>, alloc::vec::Vec<alloc::string::String>>>::insert_full Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Function>, alloc::vec::Vec<alloc::string::String>>>::insert_full Unexecuted instantiation: <indexmap::inner::Core<naga::ir::PredeclaredType, naga::arena::handle::Handle<naga::ir::Type>>>::insert_full <indexmap::inner::Core<naga::ir::Type, ()>>::insert_full Line | Count | Source | 326 | 57.2k | pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option<V>) | 327 | 57.2k | where | 328 | 57.2k | K: Eq, | 329 | | { | 330 | 57.2k | let eq = equivalent(&key, &self.entries); | 331 | 57.2k | let hasher = get_hash(&self.entries); | 332 | 57.2k | match self.indices.entry(hash.get(), eq, hasher) { | 333 | 11.2k | hash_table::Entry::Occupied(entry) => { | 334 | 11.2k | let i = *entry.get(); | 335 | 11.2k | (i, Some(mem::replace(&mut self.entries[i].value, value))) | 336 | | } | 337 | 46.0k | hash_table::Entry::Vacant(entry) => { | 338 | 46.0k | let i = self.entries.len(); | 339 | 46.0k | entry.insert(i); | 340 | 46.0k | self.push_entry(hash, key, value); | 341 | 46.0k | debug_assert_eq!(self.indices.len(), self.entries.len()); | 342 | 46.0k | (i, None) | 343 | | } | 344 | | } | 345 | 57.2k | } |
<indexmap::inner::Core<naga::front::wgsl::parse::ast::Dependency, ()>>::insert_full Line | Count | Source | 326 | 114k | pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option<V>) | 327 | 114k | where | 328 | 114k | K: Eq, | 329 | | { | 330 | 114k | let eq = equivalent(&key, &self.entries); | 331 | 114k | let hasher = get_hash(&self.entries); | 332 | 114k | match self.indices.entry(hash.get(), eq, hasher) { | 333 | 20.6k | hash_table::Entry::Occupied(entry) => { | 334 | 20.6k | let i = *entry.get(); | 335 | 20.6k | (i, Some(mem::replace(&mut self.entries[i].value, value))) | 336 | | } | 337 | 94.3k | hash_table::Entry::Vacant(entry) => { | 338 | 94.3k | let i = self.entries.len(); | 339 | 94.3k | entry.insert(i); | 340 | 94.3k | self.push_entry(hash, key, value); | 341 | 94.3k | debug_assert_eq!(self.indices.len(), self.entries.len()); | 342 | 94.3k | (i, None) | 343 | | } | 344 | | } | 345 | 114k | } |
Unexecuted instantiation: <indexmap::inner::Core<(naga::arena::handle::Handle<naga::ir::Type>, usize), alloc::vec::Vec<alloc::string::String>>>::insert_full <indexmap::inner::Core<(u32, u32), ()>>::insert_full Line | Count | Source | 326 | 4 | pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option<V>) | 327 | 4 | where | 328 | 4 | K: Eq, | 329 | | { | 330 | 4 | let eq = equivalent(&key, &self.entries); | 331 | 4 | let hasher = get_hash(&self.entries); | 332 | 4 | match self.indices.entry(hash.get(), eq, hasher) { | 333 | 0 | hash_table::Entry::Occupied(entry) => { | 334 | 0 | let i = *entry.get(); | 335 | 0 | (i, Some(mem::replace(&mut self.entries[i].value, value))) | 336 | | } | 337 | 4 | hash_table::Entry::Vacant(entry) => { | 338 | 4 | let i = self.entries.len(); | 339 | 4 | entry.insert(i); | 340 | 4 | self.push_entry(hash, key, value); | 341 | 4 | debug_assert_eq!(self.indices.len(), self.entries.len()); | 342 | 4 | (i, None) | 343 | | } | 344 | | } | 345 | 4 | } |
Unexecuted instantiation: <indexmap::inner::Core<usize, alloc::vec::Vec<alloc::string::String>>>::insert_full Unexecuted instantiation: <indexmap::inner::Core<u32, (usize, alloc::vec::Vec<i32>)>>::insert_full |
346 | | |
347 | | /// Same as `insert_full`, except it also replaces the key |
348 | | pub(crate) fn replace_full( |
349 | | &mut self, |
350 | | hash: HashValue, |
351 | | key: K, |
352 | | value: V, |
353 | | ) -> (usize, Option<(K, V)>) |
354 | | where |
355 | | K: Eq, |
356 | | { |
357 | | let eq = equivalent(&key, &self.entries); |
358 | | let hasher = get_hash(&self.entries); |
359 | | match self.indices.entry(hash.get(), eq, hasher) { |
360 | | hash_table::Entry::Occupied(entry) => { |
361 | | let i = *entry.get(); |
362 | | let entry = &mut self.entries[i]; |
363 | | let kv = ( |
364 | | mem::replace(&mut entry.key, key), |
365 | | mem::replace(&mut entry.value, value), |
366 | | ); |
367 | | (i, Some(kv)) |
368 | | } |
369 | | hash_table::Entry::Vacant(entry) => { |
370 | | let i = self.entries.len(); |
371 | | entry.insert(i); |
372 | | self.push_entry(hash, key, value); |
373 | | debug_assert_eq!(self.indices.len(), self.entries.len()); |
374 | | (i, None) |
375 | | } |
376 | | } |
377 | | } |
378 | | |
379 | | /// Remove an entry by shifting all entries that follow it |
380 | | pub(crate) fn shift_remove_full<Q>(&mut self, hash: HashValue, key: &Q) -> Option<(usize, K, V)> |
381 | | where |
382 | | Q: ?Sized + Equivalent<K>, |
383 | | { |
384 | | let eq = equivalent(key, &self.entries); |
385 | | let (index, _) = self.indices.find_entry(hash.get(), eq).ok()?.remove(); |
386 | | let (key, value) = self.shift_remove_finish(index); |
387 | | Some((index, key, value)) |
388 | | } |
389 | | |
390 | | /// Remove an entry by swapping it with the last |
391 | | pub(crate) fn swap_remove_full<Q>(&mut self, hash: HashValue, key: &Q) -> Option<(usize, K, V)> |
392 | | where |
393 | | Q: ?Sized + Equivalent<K>, |
394 | | { |
395 | | let eq = equivalent(key, &self.entries); |
396 | | let (index, _) = self.indices.find_entry(hash.get(), eq).ok()?.remove(); |
397 | | let (key, value) = self.swap_remove_finish(index); |
398 | | Some((index, key, value)) |
399 | | } |
400 | | |
401 | | /// Erase `start..end` from `indices`, and shift `end..` indices down to `start..` |
402 | | /// |
403 | | /// All of these items should still be at their original location in `entries`. |
404 | | /// This is used by `drain`, which will let `Vec::drain` do the work on `entries`. |
405 | 9 | fn erase_indices(&mut self, start: usize, end: usize) { |
406 | 9 | let (init, shifted_entries) = self.entries.split_at(end); |
407 | 9 | let (start_entries, erased_entries) = init.split_at(start); |
408 | | |
409 | 9 | let erased = erased_entries.len(); |
410 | 9 | let shifted = shifted_entries.len(); |
411 | 9 | let half_capacity = self.indices.capacity() / 2; |
412 | | |
413 | | // Use a heuristic between different strategies |
414 | 9 | if erased == 0 { |
415 | 1 | // Degenerate case, nothing to do |
416 | 8 | } else if start + shifted < half_capacity && start < erased { |
417 | 8 | // Reinsert everything, as there are few kept indices |
418 | 8 | self.indices.clear(); |
419 | 8 | |
420 | 8 | // Reinsert stable indices, then shifted indices |
421 | 8 | insert_bulk_no_grow(&mut self.indices, start_entries); |
422 | 8 | insert_bulk_no_grow(&mut self.indices, shifted_entries); |
423 | 8 | } else if erased + shifted < half_capacity { |
424 | | // Find each affected index, as there are few to adjust |
425 | | |
426 | | // Find erased indices |
427 | 0 | for (i, entry) in (start..).zip(erased_entries) { |
428 | 0 | erase_index(&mut self.indices, entry.hash, i); |
429 | 0 | } |
430 | | |
431 | | // Find shifted indices |
432 | 0 | for ((new, old), entry) in (start..).zip(end..).zip(shifted_entries) { |
433 | 0 | update_index(&mut self.indices, entry.hash, old, new); |
434 | 0 | } |
435 | | } else { |
436 | | // Sweep the whole table for adjustments |
437 | 0 | let offset = end - start; |
438 | 0 | self.indices.retain(move |i| { |
439 | 0 | if *i >= end { |
440 | 0 | *i -= offset; |
441 | 0 | true |
442 | | } else { |
443 | 0 | *i < start |
444 | | } |
445 | 0 | }); Unexecuted instantiation: <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::erase_indices::{closure#0}Unexecuted instantiation: <indexmap::inner::Core<naga::ir::Type, ()>>::erase_indices::{closure#0} |
446 | | } |
447 | | |
448 | 9 | debug_assert_eq!(self.indices.len(), start + shifted); |
449 | 9 | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::erase_indices Line | Count | Source | 405 | 1 | fn erase_indices(&mut self, start: usize, end: usize) { | 406 | 1 | let (init, shifted_entries) = self.entries.split_at(end); | 407 | 1 | let (start_entries, erased_entries) = init.split_at(start); | 408 | | | 409 | 1 | let erased = erased_entries.len(); | 410 | 1 | let shifted = shifted_entries.len(); | 411 | 1 | let half_capacity = self.indices.capacity() / 2; | 412 | | | 413 | | // Use a heuristic between different strategies | 414 | 1 | if erased == 0 { | 415 | 0 | // Degenerate case, nothing to do | 416 | 1 | } else if start + shifted < half_capacity && start < erased { | 417 | 1 | // Reinsert everything, as there are few kept indices | 418 | 1 | self.indices.clear(); | 419 | 1 | | 420 | 1 | // Reinsert stable indices, then shifted indices | 421 | 1 | insert_bulk_no_grow(&mut self.indices, start_entries); | 422 | 1 | insert_bulk_no_grow(&mut self.indices, shifted_entries); | 423 | 1 | } else if erased + shifted < half_capacity { | 424 | | // Find each affected index, as there are few to adjust | 425 | | | 426 | | // Find erased indices | 427 | 0 | for (i, entry) in (start..).zip(erased_entries) { | 428 | 0 | erase_index(&mut self.indices, entry.hash, i); | 429 | 0 | } | 430 | | | 431 | | // Find shifted indices | 432 | 0 | for ((new, old), entry) in (start..).zip(end..).zip(shifted_entries) { | 433 | 0 | update_index(&mut self.indices, entry.hash, old, new); | 434 | 0 | } | 435 | | } else { | 436 | | // Sweep the whole table for adjustments | 437 | 0 | let offset = end - start; | 438 | 0 | self.indices.retain(move |i| { | 439 | | if *i >= end { | 440 | | *i -= offset; | 441 | | true | 442 | | } else { | 443 | | *i < start | 444 | | } | 445 | | }); | 446 | | } | 447 | | | 448 | 1 | debug_assert_eq!(self.indices.len(), start + shifted); | 449 | 1 | } |
<indexmap::inner::Core<naga::ir::Type, ()>>::erase_indices Line | Count | Source | 405 | 8 | fn erase_indices(&mut self, start: usize, end: usize) { | 406 | 8 | let (init, shifted_entries) = self.entries.split_at(end); | 407 | 8 | let (start_entries, erased_entries) = init.split_at(start); | 408 | | | 409 | 8 | let erased = erased_entries.len(); | 410 | 8 | let shifted = shifted_entries.len(); | 411 | 8 | let half_capacity = self.indices.capacity() / 2; | 412 | | | 413 | | // Use a heuristic between different strategies | 414 | 8 | if erased == 0 { | 415 | 1 | // Degenerate case, nothing to do | 416 | 7 | } else if start + shifted < half_capacity && start < erased { | 417 | 7 | // Reinsert everything, as there are few kept indices | 418 | 7 | self.indices.clear(); | 419 | 7 | | 420 | 7 | // Reinsert stable indices, then shifted indices | 421 | 7 | insert_bulk_no_grow(&mut self.indices, start_entries); | 422 | 7 | insert_bulk_no_grow(&mut self.indices, shifted_entries); | 423 | 7 | } else if erased + shifted < half_capacity { | 424 | | // Find each affected index, as there are few to adjust | 425 | | | 426 | | // Find erased indices | 427 | 0 | for (i, entry) in (start..).zip(erased_entries) { | 428 | 0 | erase_index(&mut self.indices, entry.hash, i); | 429 | 0 | } | 430 | | | 431 | | // Find shifted indices | 432 | 0 | for ((new, old), entry) in (start..).zip(end..).zip(shifted_entries) { | 433 | 0 | update_index(&mut self.indices, entry.hash, old, new); | 434 | 0 | } | 435 | | } else { | 436 | | // Sweep the whole table for adjustments | 437 | 0 | let offset = end - start; | 438 | 0 | self.indices.retain(move |i| { | 439 | | if *i >= end { | 440 | | *i -= offset; | 441 | | true | 442 | | } else { | 443 | | *i < start | 444 | | } | 445 | | }); | 446 | | } | 447 | | | 448 | 8 | debug_assert_eq!(self.indices.len(), start + shifted); | 449 | 8 | } |
|
450 | | |
451 | | pub(crate) fn retain_in_order<F>(&mut self, mut keep: F) |
452 | | where |
453 | | F: FnMut(&mut K, &mut V) -> bool, |
454 | | { |
455 | | self.entries |
456 | | .retain_mut(|entry| keep(&mut entry.key, &mut entry.value)); |
457 | | if self.entries.len() < self.indices.len() { |
458 | | self.rebuild_hash_table(); |
459 | | } |
460 | | } |
461 | | |
462 | | fn rebuild_hash_table(&mut self) { |
463 | | self.indices.clear(); |
464 | | insert_bulk_no_grow(&mut self.indices, &self.entries); |
465 | | } |
466 | | |
467 | | pub(crate) fn reverse(&mut self) { |
468 | | self.entries.reverse(); |
469 | | |
470 | | // No need to save hash indices, can easily calculate what they should |
471 | | // be, given that this is an in-place reversal. |
472 | | let len = self.entries.len(); |
473 | | for i in &mut self.indices { |
474 | | *i = len - *i - 1; |
475 | | } |
476 | | } |
477 | | |
478 | | /// Reserve entries capacity, rounded up to match the indices |
479 | | #[inline] |
480 | 94.2k | fn reserve_entries(&mut self, additional: usize) { |
481 | | // Use a soft-limit on the maximum capacity, but if the caller explicitly |
482 | | // requested more, do it and let them have the resulting panic. |
483 | 94.2k | let try_capacity = Ord::min(self.indices.capacity(), Self::MAX_ENTRIES_CAPACITY); |
484 | 94.2k | let try_add = try_capacity - self.entries.len(); |
485 | 94.2k | if try_add > additional && self.entries.try_reserve_exact(try_add).is_ok() { |
486 | 94.2k | return; |
487 | 0 | } |
488 | 0 | self.entries.reserve_exact(additional); |
489 | 94.2k | } <indexmap::inner::Core<naga::arena::handle::Handle<naga::ir::Expression>, alloc::string::String>>::reserve_entries Line | Count | Source | 480 | 11 | fn reserve_entries(&mut self, additional: usize) { | 481 | | // Use a soft-limit on the maximum capacity, but if the caller explicitly | 482 | | // requested more, do it and let them have the resulting panic. | 483 | 11 | let try_capacity = Ord::min(self.indices.capacity(), Self::MAX_ENTRIES_CAPACITY); | 484 | 11 | let try_add = try_capacity - self.entries.len(); | 485 | 11 | if try_add > additional && self.entries.try_reserve_exact(try_add).is_ok() { | 486 | 11 | return; | 487 | 0 | } | 488 | 0 | self.entries.reserve_exact(additional); | 489 | 11 | } |
    /// Reserve capacity in the dense `entries` vec for at least `additional`
    /// more entries.
    ///
    /// Tries to grow `entries` all the way up to the smaller of the index
    /// table's current capacity and `MAX_ENTRIES_CAPACITY`, keeping the two
    /// allocations roughly in sync; falls back to exactly `additional` if
    /// that larger request fails or would not cover `additional`.
    fn reserve_entries(&mut self, additional: usize) {
        // Use a soft-limit on the maximum capacity, but if the caller explicitly
        // requested more, do it and let them have the resulting panic.
        let try_capacity = Ord::min(self.indices.capacity(), Self::MAX_ENTRIES_CAPACITY);
        let try_add = try_capacity - self.entries.len();
        if try_add > additional && self.entries.try_reserve_exact(try_add).is_ok() {
            return;
        }
        // Fallback: reserve exactly what was asked for, with `Vec`'s usual
        // panic/abort behavior on overflow or allocation failure.
        self.entries.reserve_exact(additional);
    }
490 | | |
491 | | /// Insert a key-value pair in `entries`, |
492 | | /// *without* checking whether it already exists. |
493 | 19 | pub(super) fn insert_unique(&mut self, hash: HashValue, key: K, value: V) -> &mut Bucket<K, V> { |
494 | 19 | let i = self.indices.len(); |
495 | 19 | debug_assert_eq!(i, self.entries.len()); |
496 | 19 | self.indices |
497 | 19 | .insert_unique(hash.get(), i, get_hash(&self.entries)); |
498 | 19 | self.push_entry(hash, key, value); |
499 | 19 | &mut self.entries[i] |
500 | 19 | } Unexecuted instantiation: <indexmap::inner::Core<naga::diagnostic_filter::FilterableTriggeringRule, (naga::diagnostic_filter::Severity, naga::span::Span)>>::insert_unique <indexmap::inner::Core<u32, alloc::vec::Vec<(u32, petgraph::graphmap::CompactDirection)>>>::insert_unique Line | Count | Source | 493 | 19 | pub(super) fn insert_unique(&mut self, hash: HashValue, key: K, value: V) -> &mut Bucket<K, V> { | 494 | 19 | let i = self.indices.len(); | 495 | 19 | debug_assert_eq!(i, self.entries.len()); | 496 | 19 | self.indices | 497 | 19 | .insert_unique(hash.get(), i, get_hash(&self.entries)); | 498 | 19 | self.push_entry(hash, key, value); | 499 | 19 | &mut self.entries[i] | 500 | 19 | } |
|
501 | | |
502 | | /// Replaces the key at the given index, |
503 | | /// *without* checking whether it already exists. |
504 | | #[track_caller] |
505 | | pub(crate) fn replace_index_unique(&mut self, index: usize, hash: HashValue, key: K) -> K { |
506 | | // NB: This removal and insertion isn't "no grow" (with unreachable hasher) |
507 | | // because hashbrown's tombstones might force a resize anyway. |
508 | | erase_index(&mut self.indices, self.entries[index].hash, index); |
509 | | self.indices |
510 | | .insert_unique(hash.get(), index, get_hash(&self.entries)); |
511 | | |
512 | | let entry = &mut self.entries[index]; |
513 | | entry.hash = hash; |
514 | | mem::replace(&mut entry.key, key) |
515 | | } |
516 | | |
    /// Insert a key-value pair in `entries` at a particular index,
    /// *without* checking whether it already exists.
    ///
    /// Panics if `index` is greater than the current length.
    pub(crate) fn shift_insert_unique(
        &mut self,
        index: usize,
        hash: HashValue,
        key: K,
        value: V,
    ) -> &mut Bucket<K, V> {
        let end = self.indices.len();
        assert!(index <= end);
        // Increment others first so we don't have duplicate indices.
        self.increment_indices(index, end);
        let entries = &*self.entries;
        self.indices.insert_unique(hash.get(), index, move |&i| {
            // Adjust for the incremented indices to find hashes.
            // The table already holds post-shift indices, but `entries`
            // has not been shifted yet, so map each index back down.
            debug_assert_ne!(i, index);
            let i = if i < index { i } else { i - 1 };
            entries[i].hash.get()
        });
        if self.entries.len() == self.entries.capacity() {
            // Reserve our own capacity synced to the indices,
            // rather than letting `Vec::insert` just double it.
            self.reserve_entries(1);
        }
        self.entries.insert(index, Bucket { hash, key, value });
        &mut self.entries[index]
    }
545 | | |
546 | | /// Remove an entry by shifting all entries that follow it |
547 | | pub(crate) fn shift_remove_index(&mut self, index: usize) -> Option<(K, V)> { |
548 | | match self.entries.get(index) { |
549 | | Some(entry) => { |
550 | | erase_index(&mut self.indices, entry.hash, index); |
551 | | Some(self.shift_remove_finish(index)) |
552 | | } |
553 | | None => None, |
554 | | } |
555 | | } |
556 | | |
557 | | /// Remove an entry by shifting all entries that follow it |
558 | | /// |
559 | | /// The index should already be removed from `self.indices`. |
560 | | fn shift_remove_finish(&mut self, index: usize) -> (K, V) { |
561 | | // Correct indices that point to the entries that followed the removed entry. |
562 | | self.decrement_indices(index + 1, self.entries.len()); |
563 | | |
564 | | // Use Vec::remove to actually remove the entry. |
565 | | let entry = self.entries.remove(index); |
566 | | (entry.key, entry.value) |
567 | | } |
568 | | |
569 | | /// Remove an entry by swapping it with the last |
570 | 0 | pub(crate) fn swap_remove_index(&mut self, index: usize) -> Option<(K, V)> { |
571 | 0 | match self.entries.get(index) { |
572 | 0 | Some(entry) => { |
573 | 0 | erase_index(&mut self.indices, entry.hash, index); |
574 | 0 | Some(self.swap_remove_finish(index)) |
575 | | } |
576 | 0 | None => None, |
577 | | } |
578 | 0 | } |
579 | | |
580 | | /// Finish removing an entry by swapping it with the last |
581 | | /// |
582 | | /// The index should already be removed from `self.indices`. |
583 | 0 | fn swap_remove_finish(&mut self, index: usize) -> (K, V) { |
584 | | // use swap_remove, but then we need to update the index that points |
585 | | // to the other entry that has to move |
586 | 0 | let entry = self.entries.swap_remove(index); |
587 | | |
588 | | // correct index that points to the entry that had to swap places |
589 | 0 | if let Some(entry) = self.entries.get(index) { |
590 | 0 | // was not last element |
591 | 0 | // examine new element in `index` and find it in indices |
592 | 0 | let last = self.entries.len(); |
593 | 0 | update_index(&mut self.indices, entry.hash, last, index); |
594 | 0 | } |
595 | | |
596 | 0 | (entry.key, entry.value) |
597 | 0 | } |
598 | | |
599 | | /// Decrement all indices in the range `start..end`. |
600 | | /// |
601 | | /// The index `start - 1` should not exist in `self.indices`. |
602 | | /// All entries should still be in their original positions. |
603 | | fn decrement_indices(&mut self, start: usize, end: usize) { |
604 | | // Use a heuristic between a full sweep vs. a `find()` for every shifted item. |
605 | | let shifted_entries = &self.entries[start..end]; |
606 | | if shifted_entries.len() > self.indices.capacity() / 2 { |
607 | | // Shift all indices in range. |
608 | | for i in &mut self.indices { |
609 | | if start <= *i && *i < end { |
610 | | *i -= 1; |
611 | | } |
612 | | } |
613 | | } else { |
614 | | // Find each entry in range to shift its index. |
615 | | for (i, entry) in (start..end).zip(shifted_entries) { |
616 | | update_index(&mut self.indices, entry.hash, i, i - 1); |
617 | | } |
618 | | } |
619 | | } |
620 | | |
    /// Increment all indices in the range `start..end`.
    ///
    /// The index `end` should not exist in `self.indices`.
    /// All entries should still be in their original positions.
    ///
    /// Panics if `start..end` is out of bounds for `self.entries`.
    fn increment_indices(&mut self, start: usize, end: usize) {
        // Use a heuristic between a full sweep vs. a `find()` for every shifted item.
        let shifted_entries = &self.entries[start..end];
        if shifted_entries.len() > self.indices.capacity() / 2 {
            // Shift all indices in range.
            for i in &mut self.indices {
                if start <= *i && *i < end {
                    *i += 1;
                }
            }
        } else {
            // Find each entry in range to shift its index, updated in reverse so
            // we never have duplicated indices that might have a hash collision.
            // (Writing i -> i+1 forward would momentarily duplicate i+1.)
            for (i, entry) in (start..end).zip(shifted_entries).rev() {
                update_index(&mut self.indices, entry.hash, i, i + 1);
            }
        }
    }
643 | | |
    /// Move the entry at `from` to position `to`, shifting everything in
    /// between and keeping the index table consistent.
    ///
    /// Panics if either index is out of bounds, or if `from` is missing
    /// from the index table.
    #[track_caller]
    pub(super) fn move_index(&mut self, from: usize, to: usize) {
        let from_hash = self.entries[from].hash;
        if from != to {
            let _ = self.entries[to]; // explicit bounds check

            // Find the bucket index first so we won't lose it among other updated indices.
            let bucket = self
                .indices
                .find_bucket_index(from_hash.get(), move |&i| i == from)
                .expect("index not found");

            // NOTE(review): relies on `move_index_inner` only rewriting the
            // values stored in buckets, never relocating buckets themselves,
            // so `bucket` stays valid — confirm against hashbrown's HashTable.
            self.move_index_inner(from, to);
            *self.indices.get_bucket_mut(bucket).unwrap() = to;
        }
    }
660 | | |
661 | | fn move_index_inner(&mut self, from: usize, to: usize) { |
662 | | // Update all other indices and rotate the entry positions. |
663 | | if from < to { |
664 | | self.decrement_indices(from + 1, to + 1); |
665 | | self.entries[from..=to].rotate_left(1); |
666 | | } else if to < from { |
667 | | self.increment_indices(to, from); |
668 | | self.entries[to..=from].rotate_right(1); |
669 | | } |
670 | | } |
671 | | |
    /// Swap the positions of the entries at `a` and `b`, updating both of
    /// their mappings in the index table.
    ///
    /// Panics if either index is out of bounds, or if either is missing
    /// from the index table.
    #[track_caller]
    pub(crate) fn swap_indices(&mut self, a: usize, b: usize) {
        // If they're equal and in-bounds, there's nothing to do.
        if a == b && a < self.entries.len() {
            return;
        }

        // We'll get a "nice" bounds-check from indexing `entries`,
        // and then we expect to find it in the table as well.
        // NOTE(review): presumably the closure's first argument selects
        // which of the two probed hashes is being matched (0 for `a`,
        // 1 for `b`) — confirm against hashbrown's `get_disjoint_mut`.
        match self.indices.get_disjoint_mut(
            [self.entries[a].hash.get(), self.entries[b].hash.get()],
            move |i, &x| if i == 0 { x == a } else { x == b },
        ) {
            [Some(ref_a), Some(ref_b)] => {
                mem::swap(ref_a, ref_b);
                self.entries.swap(a, b);
            }
            _ => panic!("indices not found"),
        }
    }
692 | | } |
693 | | |
#[test]
fn assert_send_sync() {
    // Compile-time check that `Core` is `Send + Sync` for suitable K/V;
    // the body does no runtime work.
    fn check<T: Send + Sync>() {}
    check::<Core<i32, i32>>();
}