/src/cryptsetup/lib/utils_safe_memory.c
Line | Count | Source |
1 | | // SPDX-License-Identifier: GPL-2.0-or-later |
2 | | /* |
3 | | * utils_safe_memory - safe memory helpers |
4 | | * |
5 | | * Copyright (C) 2009-2025 Red Hat, Inc. All rights reserved. |
6 | | * Copyright (C) 2009-2025 Milan Broz |
7 | | */ |
8 | | |
9 | | #include <string.h> |
10 | | #include <sys/mman.h> |
11 | | #include "internal.h" |
12 | | |
13 | | struct safe_allocation { |
14 | | size_t size; |
15 | | bool locked; |
16 | | char data[0] __attribute__((aligned(8))); |
17 | | }; |
18 | 5.50k | #define OVERHEAD offsetof(struct safe_allocation, data) |
19 | | |
20 | | /* |
21 | | * Replacement for memset(s, 0, n) on stack that can be optimized out |
22 | | * Also used in safe allocations for explicit memory wipe. |
23 | | */ |
24 | | void crypt_safe_memzero(void *data, size_t size) |
25 | 48.3k | { |
26 | 48.3k | if (!data) |
27 | 0 | return; |
28 | | |
29 | 48.3k | return crypt_backend_memzero(data, size); |
30 | 48.3k | } |
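
The comment above describes the dead-store problem this helper solves: a final memset() on a buffer that is about to go out of scope is often removed by the optimizer. A minimal caller sketch (the function and buffer below are hypothetical; only crypt_safe_memzero() comes from this file and is assumed declared via internal.h):

    #include <string.h>

    /* Hypothetical caller: a passphrase held briefly on the stack is
     * wiped before the function returns. */
    static void use_passphrase(const char *pass)
    {
        char buf[256];

        strncpy(buf, pass, sizeof(buf) - 1);
        buf[sizeof(buf) - 1] = '\0';

        /* ... derive a key from buf ... */

        /* A plain memset(buf, 0, sizeof(buf)) here is a dead store the
         * optimizer may drop; crypt_safe_memzero() routes through
         * crypt_backend_memzero() so the wipe actually happens. */
        crypt_safe_memzero(buf, sizeof(buf));
    }
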
31 | | |
32 | | /* Memcpy helper to avoid spilling sensitive data through additional registers */ |
33 | | void *crypt_safe_memcpy(void *dst, const void *src, size_t size) |
34 | 1.04k | { |
35 | 1.04k | if (!dst || !src) |
36 | 0 | return NULL; |
37 | | |
38 | 1.04k | return crypt_backend_memcpy(dst, src, size); |
39 | 1.04k | } |
40 | | |
41 | | /* safe allocations */ |
42 | | void *crypt_safe_alloc(size_t size) |
43 | 892 | { |
44 | 892 | struct safe_allocation *alloc; |
45 | | |
46 | 892 | if (!size || size > (SIZE_MAX - OVERHEAD)) |
47 | 0 | return NULL; |
48 | | |
49 | 892 | alloc = malloc(size + OVERHEAD); |
50 | 892 | if (!alloc) |
51 | 0 | return NULL; |
52 | | |
53 | 892 | crypt_backend_memzero(alloc, size + OVERHEAD); |
54 | 892 | alloc->size = size; |
55 | | |
56 | | /* Ignore failure if it is over limit. */ |
57 | 892 | if (!mlock(alloc, size + OVERHEAD)) |
58 | 892 | alloc->locked = true; |
59 | | |
60 | | /* coverity[leaked_storage] */ |
61 | 892 | return &alloc->data; |
62 | 892 | } |
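
The returned pointer is the data[] member, past the hidden size/locked header, so the caller's buffer is already zero-filled and, when the mlock() above succeeded, resident in locked memory. A minimal lifecycle sketch (hypothetical caller; crypt_safe_alloc(), crypt_safe_memcpy() and crypt_safe_free() are the helpers from this file, assumed declared via internal.h):

    #include <errno.h>
    #include <stddef.h>

    /* Hypothetical caller: keep a copy of key material in zeroed,
     * mlock()ed memory for the duration of an operation. */
    static int with_key_copy(const void *key, size_t key_len)
    {
        void *safe_key = crypt_safe_alloc(key_len);

        if (!safe_key)
            return -ENOMEM;

        /* Copy without spilling the key through extra registers. */
        crypt_safe_memcpy(safe_key, key, key_len);

        /* ... use safe_key ... */

        /* Wipes key_len bytes, munlock()s and frees the whole block. */
        crypt_safe_free(safe_key);
        return 0;
    }
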
63 | | |
64 | | void crypt_safe_free(void *data) |
65 | 1.74k | { |
66 | 1.74k | struct safe_allocation *alloc; |
67 | 1.74k | volatile size_t *s; |
68 | 1.74k | void *p; |
69 | | |
70 | 1.74k | if (!data) |
71 | 850 | return; |
72 | | |
73 | 892 | p = (char *)data - OVERHEAD; |
74 | 892 | alloc = (struct safe_allocation *)p; |
75 | | |
76 | 892 | crypt_backend_memzero(data, alloc->size); |
77 | | |
78 | 892 | if (alloc->locked) { |
79 | 892 | munlock(alloc, alloc->size + OVERHEAD); |
80 | 892 | alloc->locked = false; |
81 | 892 | } |
82 | | |
83 | 892 | s = (volatile size_t *)&alloc->size; |
84 | 892 | *s = 0x55aa55aa; |
85 | 892 | free(alloc); |
86 | 892 | } |
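
Like free(), crypt_safe_free(NULL) is a no-op (the early return above), which keeps cleanup paths simple when several safe buffers are allocated together. A hypothetical error-path sketch:

    #include <errno.h>
    #include <stddef.h>

    /* Hypothetical caller: either pointer may still be NULL when the
     * cleanup runs, mirroring the free() convention. */
    static int make_two_buffers(void **a, void **b, size_t len)
    {
        void *p1 = crypt_safe_alloc(len);
        void *p2 = crypt_safe_alloc(len);

        if (!p1 || !p2) {
            crypt_safe_free(p1);    /* no-op when NULL */
            crypt_safe_free(p2);
            return -ENOMEM;
        }

        *a = p1;
        *b = p2;
        return 0;
    }
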
87 | | |
88 | | void *crypt_safe_realloc(void *data, size_t size) |
89 | 0 | { |
90 | 0 | struct safe_allocation *alloc; |
91 | 0 | void *new_data; |
92 | 0 | void *p; |
93 | |
94 | 0 | new_data = crypt_safe_alloc(size); |
95 | |
96 | 0 | if (new_data && data) { |
97 | |
98 | 0 | p = (char *)data - OVERHEAD; |
99 | 0 | alloc = (struct safe_allocation *)p; |
100 | |
101 | 0 | if (size > alloc->size) |
102 | 0 | size = alloc->size; |
103 | |
104 | 0 | crypt_backend_memcpy(new_data, data, size); |
105 | 0 | } |
106 | |
107 | 0 | crypt_safe_free(data); |
108 | 0 | return new_data; |
109 | 0 | } |
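
crypt_safe_realloc() always moves the data: it allocates a fresh safe buffer, copies at most the old alloc->size bytes, then wipes and frees the old block unconditionally. That last point differs from realloc(): the old allocation is gone even when the new one fails. A hypothetical caller sketch:

    #include <errno.h>
    #include <stddef.h>

    /* Hypothetical caller: grow a safe buffer. Unlike realloc(), the old
     * block is wiped and freed even on failure, so the old pointer must
     * never be reused after this call. */
    static int grow_secret(void **secret, size_t new_len)
    {
        void *p = crypt_safe_realloc(*secret, new_len);

        if (!p) {
            *secret = NULL;    /* old allocation is already gone */
            return -ENOMEM;
        }

        *secret = p;
        return 0;
    }
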
110 | | |
111 | | size_t crypt_safe_alloc_size(const void *data) |
112 | 151 | { |
113 | 151 | const void *p; |
114 | | |
115 | 151 | if (!data) |
116 | 0 | return 0; |
117 | | |
118 | 151 | p = (const char *)data - OVERHEAD; |
119 | | |
120 | 151 | return ((const struct safe_allocation *)p)->size; |
121 | 151 | } |
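
crypt_safe_alloc_size() reads the requested size back out of the hidden header, so code that only received the data pointer can wipe or bound-check the buffer without carrying the length separately. A minimal sketch (hypothetical caller; valid only for pointers obtained from crypt_safe_alloc()/crypt_safe_realloc()):

    #include <stddef.h>

    /* Hypothetical caller: re-wipe a safe allocation in place, with the
     * length recovered from the allocation header. */
    static void rewipe(void *safe_buf)
    {
        if (safe_buf)
            crypt_safe_memzero(safe_buf, crypt_safe_alloc_size(safe_buf));
    }
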