/src/libunwind/include/remote.h
Line | Count | Source |
1 | | #ifndef REMOTE_H |
2 | | #define REMOTE_H |
3 | | |
4 | | /* Helper functions for accessing (remote) memory. These functions |
5 | | assume that all addresses are naturally aligned (e.g., a 32-bit |
6 | | quantity is stored at a 32-bit-aligned address). */ |
7 | | |
8 | | #ifdef UNW_LOCAL_ONLY |
9 | | |
10 | | static inline int |
11 | | fetch8 (unw_addr_space_t as UNUSED, unw_accessors_t *a UNUSED, |
12 | | unw_word_t *addr, int8_t *valp, void *arg UNUSED) |
13 | 0 | { |
14 | 0 | *valp = *(int8_t *) (uintptr_t) *addr; |
15 | 0 | *addr += 1; |
16 | 0 | return 0; |
17 | 0 | } |
18 | | |
19 | | static inline int |
20 | | fetch16 (unw_addr_space_t as UNUSED, unw_accessors_t *a UNUSED, |
21 | | unw_word_t *addr, int16_t *valp, void *arg UNUSED) |
22 | 0 | { |
23 | 0 | *valp = *(int16_t *) (uintptr_t) *addr; |
24 | 0 | *addr += 2; |
25 | 0 | return 0; |
26 | 0 | } |
27 | | |
28 | | static inline int |
29 | | fetch32 (unw_addr_space_t as UNUSED, unw_accessors_t *a UNUSED, |
30 | | unw_word_t *addr, int32_t *valp, void *arg UNUSED) |
31 | 0 | { |
32 | 0 | *valp = *(int32_t *) (uintptr_t) *addr; |
33 | 0 | *addr += 4; |
34 | 0 | return 0; |
35 | 0 | } |
36 | | |
37 | | static inline int |
38 | | fetchw (unw_addr_space_t as UNUSED, unw_accessors_t *a UNUSED, |
39 | | unw_word_t *addr, unw_word_t *valp, void *arg UNUSED) |
40 | 0 | { |
41 | 0 | *valp = *(unw_word_t *) (uintptr_t) *addr; |
42 | 0 | *addr += sizeof (unw_word_t); |
43 | 0 | return 0; |
44 | 0 | } |
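
In UNW_LOCAL_ONLY builds the target address space is the current process, so each helper simply dereferences the address and bumps it past the value just read; the accessor arguments are ignored. A minimal sketch of chained reads follows. It is illustrative only: remote.h is an internal header that is not installed, so the include assumes a libunwind build tree.

#define UNW_LOCAL_ONLY
#include <stdint.h>
#include <stdio.h>
#include <libunwind.h>
#include "remote.h"   /* internal header: assumes a libunwind build tree */

int
main (void)
{
  int32_t buf[2] = { 41, 42 }, first, second;
  unw_word_t addr = (unw_word_t) (uintptr_t) buf;

  /* Each call reads one 32-bit value and advances addr by 4.  */
  fetch32 (unw_local_addr_space, NULL, &addr, &first, NULL);
  fetch32 (unw_local_addr_space, NULL, &addr, &second, NULL);
  printf ("%d %d\n", first, second);   /* prints "41 42" */
  return 0;
}
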
45 | | |
46 | | #else /* !UNW_LOCAL_ONLY */ |
47 | | |
48 | | #define WSIZE (sizeof (unw_word_t)) |
49 | | |
50 | | static inline int |
51 | | fetch8 (unw_addr_space_t as, unw_accessors_t *a, |
52 | | unw_word_t *addr, int8_t *valp, void *arg) |
53 | | { |
54 | | unw_word_t val, aligned_addr = *addr & (~WSIZE + 1), off = *addr - aligned_addr; |
55 | | int ret; |
56 | | |
57 | | *addr += 1; |
58 | | |
59 | | ret = (*a->access_mem) (as, aligned_addr, &val, 0, arg); |
60 | | |
61 | | #if UNW_BYTE_ORDER == UNW_LITTLE_ENDIAN |
62 | | val >>= 8*off; |
63 | | #else |
64 | | val >>= 8*(WSIZE - 1 - off); |
65 | | #endif |
66 | | *valp = val & 0xff; |
67 | | return ret; |
68 | | } |
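
The remote fetch8 reads the whole naturally aligned word containing the target byte through the access_mem callback, then shifts and masks the byte out; the shift amount depends on the target byte order. For example, with an 8-byte unw_word_t and *addr == 0x1005, aligned_addr is 0x1000 and off is 5, so the byte sits at bit 40 on a little-endian target and at bit 8*(8-1-5) == 16 on a big-endian one. Below is a standalone sketch of just the extraction step, assuming WSIZE == 8; it is not part of libunwind.

#include <stdint.h>
#include <assert.h>

/* Byte at offset `off` of a word read from an aligned address,
   mirroring the two preprocessor branches above (WSIZE assumed 8).  */
static uint8_t
byte_from_word_le (uint64_t word, unsigned off)
{
  return (uint8_t) (word >> (8 * off));
}

static uint8_t
byte_from_word_be (uint64_t word, unsigned off)
{
  return (uint8_t) (word >> (8 * (8 - 1 - off)));
}

int
main (void)
{
  uint64_t word = 0x1122334455667788ull;  /* word as loaded from aligned_addr */

  /* Offset 5 within that word holds 0x33 when the target stores it
     little-endian, and 0x66 when it stores it big-endian.  */
  assert (byte_from_word_le (word, 5) == 0x33);
  assert (byte_from_word_be (word, 5) == 0x66);
  return 0;
}
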
69 | | |
70 | | static inline int |
71 | | fetch16 (unw_addr_space_t as, unw_accessors_t *a, |
72 | | unw_word_t *addr, int16_t *valp, void *arg) |
73 | | { |
74 | | unw_word_t val, aligned_addr = *addr & (~WSIZE + 1), off = *addr - aligned_addr; |
75 | | int ret; |
76 | | |
77 | | if ((off & 0x1) != 0) |
78 | | return -UNW_EINVAL; |
79 | | |
80 | | *addr += 2; |
81 | | |
82 | | ret = (*a->access_mem) (as, aligned_addr, &val, 0, arg); |
83 | | |
84 | | #if UNW_BYTE_ORDER == UNW_LITTLE_ENDIAN |
85 | | val >>= 8*off; |
86 | | #else |
87 | | val >>= 8*(WSIZE - 2 - off); |
88 | | #endif |
89 | | *valp = val & 0xffff; |
90 | | return ret; |
91 | | } |
92 | | |
93 | | static inline int |
94 | | fetch32 (unw_addr_space_t as, unw_accessors_t *a, |
95 | | unw_word_t *addr, int32_t *valp, void *arg) |
96 | | { |
97 | | unw_word_t val, aligned_addr = *addr & (~WSIZE + 1), off = *addr - aligned_addr; |
98 | | int ret; |
99 | | |
100 | | if ((off & 0x3) != 0) |
101 | | return -UNW_EINVAL; |
102 | | |
103 | | *addr += 4; |
104 | | |
105 | | ret = (*a->access_mem) (as, aligned_addr, &val, 0, arg); |
106 | | |
107 | | #if UNW_BYTE_ORDER == UNW_LITTLE_ENDIAN |
108 | | val >>= 8*off; |
109 | | #else |
110 | | val >>= 8*(WSIZE - 4 - off); |
111 | | #endif |
112 | | *valp = val & 0xffffffff; |
113 | | return ret; |
114 | | } |
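
fetch16 and fetch32 follow the same word-read-and-shift pattern but first reject offsets that are not a multiple of the access size, returning -UNW_EINVAL: the header assumes naturally aligned target data, so a 16- or 32-bit value never has to be spliced from two words. A compact sketch of the 32-bit case, again assuming an 8-byte word (illustrative only, not part of libunwind):

#include <stdint.h>
#include <assert.h>

/* Mirror of fetch32's extraction for an 8-byte word; returns -1 for a
   misaligned offset, where fetch32 returns -UNW_EINVAL.  */
static int
u32_from_word (uint64_t word, unsigned off, int little_endian, uint32_t *out)
{
  if (off & 0x3)
    return -1;
  *out = (uint32_t) (little_endian ? word >> (8 * off)
                                   : word >> (8 * (8 - 4 - off)));
  return 0;
}

int
main (void)
{
  uint64_t word = 0x1122334455667788ull;
  uint32_t v;

  /* off == 4: upper half on a little-endian target, lower half on a
     big-endian one.  */
  assert (u32_from_word (word, 4, 1, &v) == 0 && v == 0x11223344u);
  assert (u32_from_word (word, 4, 0, &v) == 0 && v == 0x55667788u);
  assert (u32_from_word (word, 2, 1, &v) == -1);   /* misaligned */
  return 0;
}
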
115 | | |
116 | | static inline int |
117 | | fetchw (unw_addr_space_t as, unw_accessors_t *a, |
118 | | unw_word_t *addr, unw_word_t *valp, void *arg) |
119 | | { |
120 | | int ret; |
121 | | |
122 | | ret = (*a->access_mem) (as, *addr, valp, 0, arg); |
123 | | *addr += WSIZE; |
124 | | return ret; |
125 | | } |
126 | | |
127 | | #endif /* !UNW_LOCAL_ONLY */ |
128 | | |
129 | | #endif /* REMOTE_H */ |
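
For remote unwinding, these helpers sit on top of whatever access_mem callback the application installs in its unw_accessors_t. The sketch below wires fetch32 to a toy accessor that serves a single word of pretend target memory. The names toy_access_mem, toy_accessors, TARGET_BASE and read_target_int32 are hypothetical, and including remote.h assumes a libunwind build tree since the header is internal and not installed; a real accessor would read via ptrace, /proc/<pid>/mem or a core file instead.

#include <libunwind.h>
#include "remote.h"   /* internal header: assumes a libunwind build tree */

/* Toy "remote" image: one word of target memory at TARGET_BASE.  */
#define TARGET_BASE 0x1000
static unw_word_t target_word = 0x1122334455667788ull;

static int
toy_access_mem (unw_addr_space_t as, unw_word_t addr, unw_word_t *valp,
                int write, void *arg)
{
  (void) as; (void) arg;
  if (write || addr != TARGET_BASE)
    return -UNW_EINVAL;
  *valp = target_word;   /* a real accessor would fetch from the target here */
  return 0;
}

static unw_accessors_t toy_accessors = { .access_mem = toy_access_mem };

static int
read_target_int32 (int32_t *out)
{
  unw_addr_space_t as = unw_create_addr_space (&toy_accessors, 0);
  unw_word_t addr = TARGET_BASE + 4;   /* second 32-bit slot of the word */
  int ret = fetch32 (as, &toy_accessors, &addr, out, NULL);

  unw_destroy_addr_space (as);
  return ret;                          /* addr has advanced to TARGET_BASE + 8 */
}
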