/rust/registry/src/index.crates.io-6f17d22bba15001f/ring-0.17.14/src/aead/gcm.rs
// Copyright 2018-2024 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

use self::ffi::{Block, BLOCK_LEN, ZERO_BLOCK};
use super::{aes_gcm, Aad};
use crate::{
    bits::{BitLength, FromByteLen as _},
    error::{self, InputTooLongError},
    polyfill::{slice::AsChunks, sliceutil::overwrite_at_start, NotSend},
};
use cfg_if::cfg_if;

pub(super) use ffi::KeyValue;

cfg_if! {
    if #[cfg(any(all(target_arch = "aarch64", target_endian = "little"), target_arch = "x86_64"))] {
        pub(super) use self::ffi::{HTable, Xi};
    } else {
        use self::ffi::{HTable, Xi};
    }
}
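
// `HTable` and `Xi` are re-exported as `pub(super)` only on little-endian
// aarch64 and on x86_64: those are the targets where the integrated AES-GCM
// implementations read the GHASH state directly through the `inner()`
// accessors defined further down. Everywhere else the two types stay private
// to this module.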

#[macro_use]
mod ffi;

pub(super) mod clmul;
pub(super) mod clmulavxmovbe;
pub(super) mod fallback;
pub(super) mod neon;
pub(super) mod vclmulavx2;
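
// The modules above are the GHASH backends: carry-less-multiply
// implementations (`clmul`, `clmulavxmovbe`, `vclmulavx2`), a `neon`
// implementation, and a portable `fallback`. Which backend is used follows
// from which key type the AES-GCM code constructs, based on the target and
// the detected CPU features.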

pub(super) struct Context<'key, K> {
    Xi: Xi,
    key: &'key K,
    aad_len: BitLength<u64>,
    in_out_len: BitLength<u64>,
    _not_send: NotSend,
}

impl<'key, K: UpdateBlock> Context<'key, K> {
    #[inline(always)]
    pub(crate) fn new(
        key: &'key K,
        aad: Aad<&[u8]>,
        in_out_len: usize,
    ) -> Result<Self, error::Unspecified> {
        if in_out_len > aes_gcm::MAX_IN_OUT_LEN {
            return Err(error::Unspecified);
        }
        let in_out_len =
            BitLength::from_byte_len(in_out_len).map_err(error::erase::<InputTooLongError>)?;
        let aad_len = BitLength::from_byte_len(aad.as_ref().len())
            .map_err(error::erase::<InputTooLongError>)?;

        // NIST SP800-38D Section 5.2.1.1 says that the maximum AAD length is
        // 2**64 - 1 bits, i.e. BitLength<u64>::MAX, so we don't need to do an
        // explicit check here.

        let mut ctx = Self {
            Xi: Xi(ZERO_BLOCK),
            key,
            aad_len,
            in_out_len,
            _not_send: NotSend::VALUE,
        };

        for ad in aad.0.chunks(BLOCK_LEN) {
            let mut block = ZERO_BLOCK;
            overwrite_at_start(&mut block, ad);
            ctx.update_block(block);
        }

        Ok(ctx)
    }
}
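
// Illustrative lifecycle of a `Context`, as driven by the AES-GCM code in
// `super::aes_gcm` (a sketch, not verbatim caller code): the AAD is absorbed
// block by block in `new()`, the bulk of the ciphertext is absorbed via
// `update_blocks()` (with any trailing partial block zero-padded and fed to
// `update_block()`), and `pre_finish()` folds in the length block before
// handing the final GHASH value to a closure that produces the tag:
//
//     let mut ctx = Context::new(&gcm_key, aad, in_out.len())?;
//     ctx.update_blocks(whole_blocks);
//     ctx.update_block(padded_partial_block);
//     let tag = ctx.pre_finish(|xi| tag_from_xi(xi));
//
// `gcm_key`, `whole_blocks`, `padded_partial_block`, and `tag_from_xi` are
// placeholders for whatever the caller actually supplies.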

#[cfg(all(
    target_arch = "aarch64",
    target_endian = "little",
    target_pointer_width = "64"
))]
impl<K> Context<'_, K> {
    pub(super) fn in_out_whole_block_bits(&self) -> BitLength<usize> {
        use crate::polyfill::usize_from_u64;
        const WHOLE_BLOCK_BITS_MASK: usize = !0b111_1111;
        #[allow(clippy::assertions_on_constants)]
        const _WHOLE_BLOCK_BITS_MASK_CORRECT: () =
            assert!(WHOLE_BLOCK_BITS_MASK == !((BLOCK_LEN * 8) - 1));
        BitLength::from_bits(usize_from_u64(self.in_out_len.as_bits()) & WHOLE_BLOCK_BITS_MASK)
    }
}
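
// Worked example for `in_out_whole_block_bits()`: a 100-byte input is 800
// bits; masking with `!0b111_1111` clears the low 7 bits (a block is
// 16 * 8 = 128 bits), giving 800 & !127 = 768 bits, i.e. exactly the 6 whole
// 16-byte blocks, with the 4-byte remainder excluded.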

#[cfg(all(target_arch = "aarch64", target_endian = "little"))]
/// Access to `inner` for the integrated AES-GCM implementations only.
impl Context<'_, clmul::Key> {
    #[inline]
    pub(super) fn inner(&mut self) -> (&HTable, &mut Xi) {
        (&self.key.inner(), &mut self.Xi)
    }
}

#[cfg(target_arch = "x86_64")]
impl Context<'_, clmulavxmovbe::Key> {
    /// Access to `inner` for the integrated AES-GCM implementations only.
    #[inline]
    pub(super) fn inner(&mut self) -> (&HTable, &mut Xi) {
        (self.key.inner(), &mut self.Xi)
    }
}

#[cfg(target_arch = "x86_64")]
impl Context<'_, vclmulavx2::Key> {
    /// Access to `inner` for the integrated AES-GCM implementations only.
    #[inline]
    pub(super) fn inner(&mut self) -> (&HTable, &mut Xi) {
        (self.key.inner(), &mut self.Xi)
    }
}

impl<K: UpdateBlocks> Context<'_, K> {
    #[inline(always)]
    pub fn update_blocks(&mut self, input: AsChunks<u8, BLOCK_LEN>) {
        self.key.update_blocks(&mut self.Xi, input);
    }
}

impl<K: UpdateBlock> Context<'_, K> {
    pub fn update_block(&mut self, a: Block) {
        self.key.update_block(&mut self.Xi, a);
    }

    #[inline(always)]
    pub(super) fn pre_finish<F>(mut self, f: F) -> super::Tag
    where
        F: FnOnce(Block) -> super::Tag,
    {
        let mut block = [0u8; BLOCK_LEN];
        let (alen, clen) = block.split_at_mut(BLOCK_LEN / 2);
        alen.copy_from_slice(&BitLength::<u64>::to_be_bytes(self.aad_len));
        clen.copy_from_slice(&BitLength::<u64>::to_be_bytes(self.in_out_len));
        self.update_block(block);
        f(self.Xi.0)
    }
}
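
// The block assembled in `pre_finish()` is GCM's final length block: the
// big-endian 64-bit length of the AAD in bits followed by the big-endian
// 64-bit length of the ciphertext in bits. For example, 32 bytes of AAD and
// 100 bytes of ciphertext encode as 0x0000_0000_0000_0100 (256 bits) followed
// by 0x0000_0000_0000_0320 (800 bits). That block is absorbed with one more
// GHASH update, and the resulting `Xi` is handed to the caller's closure to
// be turned into the tag (in GCM, by combining it with the encrypted initial
// counter block).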

pub(super) trait UpdateBlock {
    fn update_block(&self, xi: &mut Xi, a: Block);
}

pub(super) trait UpdateBlocks {
    fn update_blocks(&self, xi: &mut Xi, input: AsChunks<u8, BLOCK_LEN>);
}
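
// Design note (as observed in this file): `UpdateBlock` covers the single,
// possibly zero-padded blocks (the AAD chunks in `new()` and the length block
// in `pre_finish()`), while `UpdateBlocks` lets a backend consume whole-block
// input in bulk via `AsChunks<u8, BLOCK_LEN>` rather than one block at a time.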