/src/lzo-2.10/src/lzo_func.h
Line | Count | Source |
1 | | /* lzo_func.h -- functions |
2 | | |
3 | | This file is part of the LZO real-time data compression library. |
4 | | |
5 | | Copyright (C) 1996-2017 Markus Franz Xaver Johannes Oberhumer |
6 | | All Rights Reserved. |
7 | | |
8 | | The LZO library is free software; you can redistribute it and/or |
9 | | modify it under the terms of the GNU General Public License as |
10 | | published by the Free Software Foundation; either version 2 of |
11 | | the License, or (at your option) any later version. |
12 | | |
13 | | The LZO library is distributed in the hope that it will be useful, |
14 | | but WITHOUT ANY WARRANTY; without even the implied warranty of |
15 | | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
16 | | GNU General Public License for more details. |
17 | | |
18 | | You should have received a copy of the GNU General Public License |
19 | | along with the LZO library; see the file COPYING. |
20 | | If not, write to the Free Software Foundation, Inc., |
21 | | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. |
22 | | |
23 | | Markus F.X.J. Oberhumer |
24 | | <markus@oberhumer.com> |
25 | | http://www.oberhumer.com/opensource/lzo/ |
26 | | */ |
27 | | |
28 | | |
29 | | /* WARNING: this file should *not* be used by applications. It is |
30 | | part of the implementation of the library and is subject |
31 | | to change. |
32 | | */ |
33 | | |
34 | | |
35 | | #ifndef __LZO_FUNC_H |
36 | | #define __LZO_FUNC_H 1 |
37 | | |
38 | | |
39 | | /*********************************************************************** |
40 | | // bitops |
41 | | ************************************************************************/ |
42 | | |
43 | | #if !defined(LZO_BITOPS_USE_ASM_BITSCAN) && !defined(LZO_BITOPS_USE_GNUC_BITSCAN) && !defined(LZO_BITOPS_USE_MSC_BITSCAN) |
44 | | #if 1 && (LZO_ARCH_AMD64) && (LZO_CC_GNUC && (LZO_CC_GNUC < 0x040000ul)) && (LZO_ASM_SYNTAX_GNUC) |
45 | | #define LZO_BITOPS_USE_ASM_BITSCAN 1 |
46 | | #elif (LZO_CC_CLANG || (LZO_CC_GNUC >= 0x030400ul) || (LZO_CC_INTELC_GNUC && (__INTEL_COMPILER >= 1000)) || (LZO_CC_LLVM && (!defined(__llvm_tools_version__) || (__llvm_tools_version__+0 >= 0x010500ul)))) |
47 | | #define LZO_BITOPS_USE_GNUC_BITSCAN 1 |
48 | | #elif (LZO_OS_WIN32 || LZO_OS_WIN64) && ((LZO_CC_INTELC_MSC && (__INTEL_COMPILER >= 1010)) || (LZO_CC_MSC && (_MSC_VER >= 1400))) |
49 | | #define LZO_BITOPS_USE_MSC_BITSCAN 1 |
50 | | #if (LZO_CC_MSC) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) |
51 | | #include <intrin.h> |
52 | | #endif |
53 | | #if (LZO_CC_MSC) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) |
54 | | #pragma intrinsic(_BitScanReverse) |
55 | | #pragma intrinsic(_BitScanForward) |
56 | | #endif |
57 | | #if (LZO_CC_MSC) && (LZO_ARCH_AMD64) |
58 | | #pragma intrinsic(_BitScanReverse64) |
59 | | #pragma intrinsic(_BitScanForward64) |
60 | | #endif |
61 | | #endif |
62 | | #endif |
63 | | |
64 | | __lzo_static_forceinline unsigned lzo_bitops_ctlz32_func(lzo_uint32_t v) |
65 | 32 | { |
66 | | #if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) |
67 | | unsigned long r; (void) _BitScanReverse(&r, v); return (unsigned) r ^ 31; |
68 | | #define lzo_bitops_ctlz32(v) lzo_bitops_ctlz32_func(v) |
69 | | #elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) && (LZO_ASM_SYNTAX_GNUC) |
70 | | lzo_uint32_t r; |
71 | | __asm__("bsr %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC); |
72 | | return (unsigned) r ^ 31; |
73 | | #define lzo_bitops_ctlz32(v) lzo_bitops_ctlz32_func(v) |
74 | | #elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_INT == 4) |
75 | | unsigned r; r = (unsigned) __builtin_clz(v); return r; |
76 | 32 | #define lzo_bitops_ctlz32(v) ((unsigned) __builtin_clz(v)) |
77 | | #elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG == 8) && (LZO_WORDSIZE >= 8) |
78 | | unsigned r; r = (unsigned) __builtin_clzl(v); return r ^ 32; |
79 | | #define lzo_bitops_ctlz32(v) (((unsigned) __builtin_clzl(v)) ^ 32) |
80 | | #else |
81 | | LZO_UNUSED(v); return 0; |
82 | | #endif |
83 | 32 | } |
84 | | |
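A side note on the constants above: `bsr` returns the index of the highest set bit, so for a non-zero 32-bit value the leading-zero count is `31 - bsr(v)`, written as `31 ^ r` in the asm/MSC branches; the `^ 32` in the `__builtin_clzl` branch compensates for the value being widened to an 8-byte long. A minimal standalone check of both identities (assumes a GNU-compatible compiler and 32-bit int; not part of the library):

/* clz_check.c -- standalone sanity check, not part of liblzo2 */
#include <assert.h>
#include <stdint.h>

int main(void)
{
    unsigned i;
    for (i = 0; i < 32; i++) {
        uint32_t v = (UINT32_C(1) << i) | 1u;   /* highest set bit is bit i */
        /* clz on a 32-bit value: 31 - i, which equals 31 ^ i for i in [0,31] */
        assert((unsigned) __builtin_clz(v) == (31u ^ i));
        /* clzl on an 8-byte long sees the value widened by 32 bits, hence the ^ 32 */
        assert(sizeof(long) != 8 ||
               (((unsigned) __builtin_clzl((unsigned long) v)) ^ 32u) == (31u ^ i));
    }
    return 0;
}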
85 | | #if defined(lzo_uint64_t) |
86 | | __lzo_static_forceinline unsigned lzo_bitops_ctlz64_func(lzo_uint64_t v) |
87 | 64 | { |
88 | | #if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64) |
89 | | unsigned long r; (void) _BitScanReverse64(&r, v); return (unsigned) r ^ 63; |
90 | | #define lzo_bitops_ctlz64(v) lzo_bitops_ctlz64_func(v) |
91 | | #elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64) && (LZO_ASM_SYNTAX_GNUC) |
92 | | lzo_uint64_t r; |
93 | | __asm__("bsr %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC); |
94 | | return (unsigned) r ^ 63; |
95 | | #define lzo_bitops_ctlz64(v) lzo_bitops_ctlz64_func(v) |
96 | | #elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG == 8) && (LZO_WORDSIZE >= 8) |
97 | | unsigned r; r = (unsigned) __builtin_clzl(v); return r; |
98 | 64 | #define lzo_bitops_ctlz64(v) ((unsigned) __builtin_clzl(v)) |
99 | | #elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG_LONG == 8) && (LZO_WORDSIZE >= 8) |
100 | | unsigned r; r = (unsigned) __builtin_clzll(v); return r; |
101 | | #define lzo_bitops_ctlz64(v) ((unsigned) __builtin_clzll(v)) |
102 | | #else |
103 | | LZO_UNUSED(v); return 0; |
104 | | #endif |
105 | 64 | } |
106 | | #endif |
107 | | |
108 | | __lzo_static_forceinline unsigned lzo_bitops_cttz32_func(lzo_uint32_t v) |
109 | 32 | { |
110 | | #if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) |
111 | | unsigned long r; (void) _BitScanForward(&r, v); return (unsigned) r; |
112 | | #define lzo_bitops_cttz32(v) lzo_bitops_cttz32_func(v) |
113 | | #elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) && (LZO_ASM_SYNTAX_GNUC) |
114 | | lzo_uint32_t r; |
115 | | __asm__("bsf %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC); |
116 | | return (unsigned) r; |
117 | | #define lzo_bitops_cttz32(v) lzo_bitops_cttz32_func(v) |
118 | | #elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_INT >= 4) |
119 | | unsigned r; r = (unsigned) __builtin_ctz(v); return r; |
120 | 32 | #define lzo_bitops_cttz32(v) ((unsigned) __builtin_ctz(v)) |
121 | | #else |
122 | | LZO_UNUSED(v); return 0; |
123 | | #endif |
124 | 32 | } |
125 | | |
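`lzo_bitops_cttz32` is a plain count-trailing-zeros. One standard way to use such a primitive, shown here only as a standalone illustration (this exact loop is not in LZO; assumes a GNU-compatible compiler for `__builtin_ctz`), is to enumerate the set bits of a mask lowest-first:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint32_t mask = 0x00012050u;                        /* bits 4, 6, 13, 16 set */
    while (mask != 0) {
        unsigned bit = (unsigned) __builtin_ctz(mask);  /* index of lowest set bit */
        printf("bit %u\n", bit);
        mask &= mask - 1;                               /* clear that bit */
    }
    return 0;
}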
126 | | #if defined(lzo_uint64_t) |
127 | | __lzo_static_forceinline unsigned lzo_bitops_cttz64_func(lzo_uint64_t v) |
128 | 64 | { |
129 | | #if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64) |
130 | | unsigned long r; (void) _BitScanForward64(&r, v); return (unsigned) r; |
131 | | #define lzo_bitops_cttz64(v) lzo_bitops_cttz64_func(v) |
132 | | #elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64) && (LZO_ASM_SYNTAX_GNUC) |
133 | | lzo_uint64_t r; |
134 | | __asm__("bsf %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC); |
135 | | return (unsigned) r; |
136 | | #define lzo_bitops_cttz64(v) lzo_bitops_cttz64_func(v) |
137 | | #elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG >= 8) && (LZO_WORDSIZE >= 8) |
138 | | unsigned r; r = (unsigned) __builtin_ctzl(v); return r; |
139 | 154k | #define lzo_bitops_cttz64(v) ((unsigned) __builtin_ctzl(v)) |
140 | | #elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG_LONG >= 8) && (LZO_WORDSIZE >= 8) |
141 | | unsigned r; r = (unsigned) __builtin_ctzll(v); return r; |
142 | | #define lzo_bitops_cttz64(v) ((unsigned) __builtin_ctzll(v)) |
143 | | #else |
144 | | LZO_UNUSED(v); return 0; |
145 | | #endif |
146 | 64 | } |
147 | | #endif |
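Within LZO1X, this kind of 64-bit trailing-zero count is what turns the first differing bit of two 8-byte words into a byte offset when measuring match lengths on little-endian targets, which is consistent with the high hit count on the `__builtin_ctzl` branch above. A rough standalone sketch of that idea (not the library's actual loop; it assumes a little-endian target and uses memcpy for the unaligned loads):

#include <stdint.h>
#include <string.h>

/* Count how many leading bytes of a[] and b[] are equal, up to n bytes.
 * Little-endian only: the lowest differing byte maps to the lowest differing
 * bits, so ctz(x ^ y) / 8 is the number of equal bytes in the current word. */
static size_t match_len(const unsigned char *a, const unsigned char *b, size_t n)
{
    size_t len = 0;
    while (n - len >= 8) {
        uint64_t x, y;
        memcpy(&x, a + len, 8);            /* unaligned-safe loads */
        memcpy(&y, b + len, 8);
        if (x != y)
            return len + ((unsigned) __builtin_ctzll(x ^ y) >> 3);
        len += 8;
    }
    while (len < n && a[len] == b[len])
        len++;
    return len;
}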
148 | | |
149 | | lzo_unused_funcs_impl(void, lzo_bitops_unused_funcs)(void) |
150 | 0 | { |
151 | 0 | LZO_UNUSED_FUNC(lzo_bitops_unused_funcs); |
152 | 0 | LZO_UNUSED_FUNC(lzo_bitops_ctlz32_func); |
153 | 0 | LZO_UNUSED_FUNC(lzo_bitops_cttz32_func); |
154 | 0 | #if defined(lzo_uint64_t) |
155 | 0 | LZO_UNUSED_FUNC(lzo_bitops_ctlz64_func); |
156 | 0 | LZO_UNUSED_FUNC(lzo_bitops_cttz64_func); |
157 | 0 | #endif |
158 | 0 | } |
159 | | |
160 | | |
161 | | /*********************************************************************** |
162 | | // memops |
163 | | ************************************************************************/ |
164 | | |
165 | | #if defined(__lzo_alignof) && !(LZO_CFG_NO_UNALIGNED) |
166 | | /* CBUG: disabled because of gcc bug 64516 */ |
167 | | #if !defined(lzo_memops_tcheck__) && 0 |
168 | | #define lzo_memops_tcheck__(t,a,b) ((void)0, sizeof(t) == (a) && __lzo_alignof(t) == (b)) |
169 | | #endif |
170 | | #endif |
171 | | #ifndef lzo_memops_TU0p |
172 | | #define lzo_memops_TU0p void __LZO_MMODEL * |
173 | | #endif |
174 | | #ifndef lzo_memops_TU1p |
175 | 945 | #define lzo_memops_TU1p unsigned char __LZO_MMODEL * |
176 | | #endif |
177 | | #ifndef lzo_memops_TU2p |
178 | | #if (LZO_OPT_UNALIGNED16) |
179 | | typedef lzo_uint16_t __lzo_may_alias lzo_memops_TU2; |
180 | | #define lzo_memops_TU2p volatile lzo_memops_TU2 * |
181 | | #elif defined(__lzo_byte_struct) |
182 | | __lzo_byte_struct(lzo_memops_TU2_struct,2) |
183 | | typedef struct lzo_memops_TU2_struct lzo_memops_TU2; |
184 | | #else |
185 | | struct lzo_memops_TU2_struct { unsigned char a[2]; } __lzo_may_alias; |
186 | | typedef struct lzo_memops_TU2_struct lzo_memops_TU2; |
187 | | #endif |
188 | | #ifndef lzo_memops_TU2p |
189 | | #define lzo_memops_TU2p lzo_memops_TU2 * |
190 | | #endif |
191 | | #endif |
192 | | #ifndef lzo_memops_TU4p |
193 | | #if (LZO_OPT_UNALIGNED32) |
194 | | typedef lzo_uint32_t __lzo_may_alias lzo_memops_TU4; |
195 | | #define lzo_memops_TU4p volatile lzo_memops_TU4 __LZO_MMODEL * |
196 | | #elif defined(__lzo_byte_struct) |
197 | | __lzo_byte_struct(lzo_memops_TU4_struct,4) |
198 | | typedef struct lzo_memops_TU4_struct lzo_memops_TU4; |
199 | | #else |
200 | | struct lzo_memops_TU4_struct { unsigned char a[4]; } __lzo_may_alias; |
201 | | typedef struct lzo_memops_TU4_struct lzo_memops_TU4; |
202 | | #endif |
203 | | #ifndef lzo_memops_TU4p |
204 | | #define lzo_memops_TU4p lzo_memops_TU4 __LZO_MMODEL * |
205 | | #endif |
206 | | #endif |
207 | | #ifndef lzo_memops_TU8p |
208 | | #if (LZO_OPT_UNALIGNED64) |
209 | | typedef lzo_uint64_t __lzo_may_alias lzo_memops_TU8; |
210 | | #define lzo_memops_TU8p volatile lzo_memops_TU8 __LZO_MMODEL * |
211 | | #elif defined(__lzo_byte_struct) |
212 | | __lzo_byte_struct(lzo_memops_TU8_struct,8) |
213 | | typedef struct lzo_memops_TU8_struct lzo_memops_TU8; |
214 | | #else |
215 | | struct lzo_memops_TU8_struct { unsigned char a[8]; } __lzo_may_alias; |
216 | | typedef struct lzo_memops_TU8_struct lzo_memops_TU8; |
217 | | #endif |
218 | | #ifndef lzo_memops_TU8p |
219 | | #define lzo_memops_TU8p lzo_memops_TU8 __LZO_MMODEL * |
220 | | #endif |
221 | | #endif |
222 | | #ifndef lzo_memops_set_TU1p |
223 | 223k | #define lzo_memops_set_TU1p volatile lzo_memops_TU1p |
224 | | #endif |
225 | | #ifndef lzo_memops_move_TU1p |
226 | | #define lzo_memops_move_TU1p lzo_memops_TU1p |
227 | | #endif |
228 | | #define LZO_MEMOPS_SET1(dd,cc) \ |
229 | 223k | LZO_BLOCK_BEGIN \ |
230 | 223k | lzo_memops_set_TU1p d__1 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \ |
231 | 223k | d__1[0] = LZO_BYTE(cc); \ |
232 | 223k | LZO_BLOCK_END |
233 | | #define LZO_MEMOPS_SET2(dd,cc) \ |
234 | | LZO_BLOCK_BEGIN \ |
235 | | lzo_memops_set_TU1p d__2 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \ |
236 | | d__2[0] = LZO_BYTE(cc); d__2[1] = LZO_BYTE(cc); \ |
237 | | LZO_BLOCK_END |
238 | | #define LZO_MEMOPS_SET3(dd,cc) \ |
239 | | LZO_BLOCK_BEGIN \ |
240 | | lzo_memops_set_TU1p d__3 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \ |
241 | | d__3[0] = LZO_BYTE(cc); d__3[1] = LZO_BYTE(cc); d__3[2] = LZO_BYTE(cc); \ |
242 | | LZO_BLOCK_END |
243 | | #define LZO_MEMOPS_SET4(dd,cc) \ |
244 | | LZO_BLOCK_BEGIN \ |
245 | | lzo_memops_set_TU1p d__4 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \ |
246 | | d__4[0] = LZO_BYTE(cc); d__4[1] = LZO_BYTE(cc); d__4[2] = LZO_BYTE(cc); d__4[3] = LZO_BYTE(cc); \ |
247 | | LZO_BLOCK_END |
248 | | #define LZO_MEMOPS_MOVE1(dd,ss) \ |
249 | | LZO_BLOCK_BEGIN \ |
250 | | lzo_memops_move_TU1p d__1 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \ |
251 | | const lzo_memops_move_TU1p s__1 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \ |
252 | | d__1[0] = s__1[0]; \ |
253 | | LZO_BLOCK_END |
254 | | #define LZO_MEMOPS_MOVE2(dd,ss) \ |
255 | | LZO_BLOCK_BEGIN \ |
256 | | lzo_memops_move_TU1p d__2 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \ |
257 | | const lzo_memops_move_TU1p s__2 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \ |
258 | | d__2[0] = s__2[0]; d__2[1] = s__2[1]; \ |
259 | | LZO_BLOCK_END |
260 | | #define LZO_MEMOPS_MOVE3(dd,ss) \ |
261 | | LZO_BLOCK_BEGIN \ |
262 | | lzo_memops_move_TU1p d__3 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \ |
263 | | const lzo_memops_move_TU1p s__3 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \ |
264 | | d__3[0] = s__3[0]; d__3[1] = s__3[1]; d__3[2] = s__3[2]; \ |
265 | | LZO_BLOCK_END |
266 | | #define LZO_MEMOPS_MOVE4(dd,ss) \ |
267 | | LZO_BLOCK_BEGIN \ |
268 | | lzo_memops_move_TU1p d__4 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \ |
269 | | const lzo_memops_move_TU1p s__4 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \ |
270 | | d__4[0] = s__4[0]; d__4[1] = s__4[1]; d__4[2] = s__4[2]; d__4[3] = s__4[3]; \ |
271 | | LZO_BLOCK_END |
272 | | #define LZO_MEMOPS_MOVE8(dd,ss) \ |
273 | | LZO_BLOCK_BEGIN \ |
274 | | lzo_memops_move_TU1p d__8 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \ |
275 | | const lzo_memops_move_TU1p s__8 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \ |
276 | | d__8[0] = s__8[0]; d__8[1] = s__8[1]; d__8[2] = s__8[2]; d__8[3] = s__8[3]; \ |
277 | | d__8[4] = s__8[4]; d__8[5] = s__8[5]; d__8[6] = s__8[6]; d__8[7] = s__8[7]; \ |
278 | | LZO_BLOCK_END |
279 | | LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU1p)0)==1) |
280 | | #define LZO_MEMOPS_COPY1(dd,ss) LZO_MEMOPS_MOVE1(dd,ss) |
281 | | #if (LZO_OPT_UNALIGNED16) |
282 | | LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU2p)0)==2) |
283 | | #define LZO_MEMOPS_COPY2(dd,ss) \ |
284 | | * (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss) |
285 | | #elif defined(lzo_memops_tcheck__) |
286 | | #define LZO_MEMOPS_COPY2(dd,ss) \ |
287 | | LZO_BLOCK_BEGIN if (lzo_memops_tcheck__(lzo_memops_TU2,2,1)) { \ |
288 | | * (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss); \ |
289 | | } else { LZO_MEMOPS_MOVE2(dd,ss); } LZO_BLOCK_END |
290 | | #else |
291 | | #define LZO_MEMOPS_COPY2(dd,ss) LZO_MEMOPS_MOVE2(dd,ss) |
292 | | #endif |
293 | | #if (LZO_OPT_UNALIGNED32) |
294 | | LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU4p)0)==4) |
295 | | #define LZO_MEMOPS_COPY4(dd,ss) \ |
296 | 116k | * (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss) |
297 | | #elif defined(lzo_memops_tcheck__) |
298 | | #define LZO_MEMOPS_COPY4(dd,ss) \ |
299 | | LZO_BLOCK_BEGIN if (lzo_memops_tcheck__(lzo_memops_TU4,4,1)) { \ |
300 | | * (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss); \ |
301 | | } else { LZO_MEMOPS_MOVE4(dd,ss); } LZO_BLOCK_END |
302 | | #else |
303 | | #define LZO_MEMOPS_COPY4(dd,ss) LZO_MEMOPS_MOVE4(dd,ss) |
304 | | #endif |
305 | | #if (LZO_WORDSIZE != 8) |
306 | | #define LZO_MEMOPS_COPY8(dd,ss) \ |
307 | | LZO_BLOCK_BEGIN LZO_MEMOPS_COPY4(dd,ss); LZO_MEMOPS_COPY4((lzo_memops_TU1p)(lzo_memops_TU0p)(dd)+4,(const lzo_memops_TU1p)(const lzo_memops_TU0p)(ss)+4); LZO_BLOCK_END |
308 | | #else |
309 | | #if (LZO_OPT_UNALIGNED64) |
310 | | LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU8p)0)==8) |
311 | | #define LZO_MEMOPS_COPY8(dd,ss) \ |
312 | 9.49M | * (lzo_memops_TU8p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss) |
313 | | #elif (LZO_OPT_UNALIGNED32) |
314 | | #define LZO_MEMOPS_COPY8(dd,ss) \ |
315 | | LZO_BLOCK_BEGIN LZO_MEMOPS_COPY4(dd,ss); LZO_MEMOPS_COPY4((lzo_memops_TU1p)(lzo_memops_TU0p)(dd)+4,(const lzo_memops_TU1p)(const lzo_memops_TU0p)(ss)+4); LZO_BLOCK_END |
316 | | #elif defined(lzo_memops_tcheck__) |
317 | | #define LZO_MEMOPS_COPY8(dd,ss) \ |
318 | | LZO_BLOCK_BEGIN if (lzo_memops_tcheck__(lzo_memops_TU8,8,1)) { \ |
319 | | * (lzo_memops_TU8p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss); \ |
320 | | } else { LZO_MEMOPS_MOVE8(dd,ss); } LZO_BLOCK_END |
321 | | #else |
322 | | #define LZO_MEMOPS_COPY8(dd,ss) LZO_MEMOPS_MOVE8(dd,ss) |
323 | | #endif |
324 | | #endif |
325 | | #define LZO_MEMOPS_COPYN(dd,ss,nn) \ |
326 | 945 | LZO_BLOCK_BEGIN \ |
327 | 945 | lzo_memops_TU1p d__n = (lzo_memops_TU1p) (lzo_memops_TU0p) (dd); \ |
328 | 945 | const lzo_memops_TU1p s__n = (const lzo_memops_TU1p) (const lzo_memops_TU0p) (ss); \ |
329 | 945 | lzo_uint n__n = (nn); \ |
330 | 3.10M | while ((void)0, n__n >= 8) { LZO_MEMOPS_COPY8(d__n, s__n); d__n += 8; s__n += 8; n__n -= 8; } \ |
331 | 945 | if ((void)0, n__n >= 4) { LZO_MEMOPS_COPY4(d__n, s__n); d__n += 4; s__n += 4; n__n -= 4; } \ |
332 | 1.21k | if ((void)0, n__n > 0) do { *d__n++ = *s__n++; } while (--n__n > 0); \ |
333 | 945 | LZO_BLOCK_END |
334 | | |
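LZO_MEMOPS_COPYN copies `nn` bytes forward in 8-byte chunks, then at most one 4-byte chunk, then a byte tail. The same strategy written as a plain function, using fixed-size memcpy calls in place of the library's may-alias pointer types (a sketch only, not the macro itself):

#include <stddef.h>
#include <string.h>

/* Bulk 8-byte copies, one optional 4-byte copy, then a byte tail.
 * A constant-size memcpy compiles to a single load/store pair on the
 * unaligned-capable platforms the real macro targets. */
static void copy_n(unsigned char *d, const unsigned char *s, size_t n)
{
    while (n >= 8) { memcpy(d, s, 8); d += 8; s += 8; n -= 8; }
    if (n >= 4)    { memcpy(d, s, 4); d += 4; s += 4; n -= 4; }
    while (n > 0)  { *d++ = *s++; n--; }
}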
335 | | __lzo_static_forceinline lzo_uint16_t lzo_memops_get_le16(const lzo_voidp ss) |
336 | 0 | { |
337 | 0 | lzo_uint16_t v; |
338 | 0 | #if (LZO_ABI_LITTLE_ENDIAN) |
339 | 0 | LZO_MEMOPS_COPY2(&v, ss); |
340 | 0 | #elif (LZO_OPT_UNALIGNED16 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC) |
341 | 0 | const lzo_memops_TU2p s = (const lzo_memops_TU2p) ss; |
342 | 0 | unsigned long vv; |
343 | 0 | __asm__("lhbrx %0,0,%1" : "=r" (vv) : "r" (s), "m" (*s)); |
344 | 0 | v = (lzo_uint16_t) vv; |
345 | 0 | #else |
346 | 0 | const lzo_memops_TU1p s = (const lzo_memops_TU1p) ss; |
347 | 0 | v = (lzo_uint16_t) (((lzo_uint16_t)s[0]) | ((lzo_uint16_t)s[1] << 8)); |
348 | 0 | #endif |
349 | 0 | return v; |
350 | 0 | } |
351 | | #if (LZO_OPT_UNALIGNED16) && (LZO_ABI_LITTLE_ENDIAN) |
352 | 97.1k | #define LZO_MEMOPS_GET_LE16(ss) (* (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss)) |
353 | | #else |
354 | | #define LZO_MEMOPS_GET_LE16(ss) lzo_memops_get_le16(ss) |
355 | | #endif |
356 | | |
357 | | __lzo_static_forceinline lzo_uint32_t lzo_memops_get_le32(const lzo_voidp ss) |
358 | 0 | { |
359 | 0 | lzo_uint32_t v; |
360 | 0 | #if (LZO_ABI_LITTLE_ENDIAN) |
361 | 0 | LZO_MEMOPS_COPY4(&v, ss); |
362 | 0 | #elif (LZO_OPT_UNALIGNED32 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC) |
363 | 0 | const lzo_memops_TU4p s = (const lzo_memops_TU4p) ss; |
364 | 0 | unsigned long vv; |
365 | 0 | __asm__("lwbrx %0,0,%1" : "=r" (vv) : "r" (s), "m" (*s)); |
366 | 0 | v = (lzo_uint32_t) vv; |
367 | 0 | #else |
368 | 0 | const lzo_memops_TU1p s = (const lzo_memops_TU1p) ss; |
369 | 0 | v = (lzo_uint32_t) (((lzo_uint32_t)s[0]) | ((lzo_uint32_t)s[1] << 8) | ((lzo_uint32_t)s[2] << 16) | ((lzo_uint32_t)s[3] << 24)); |
370 | 0 | #endif |
371 | 0 | return v; |
372 | 0 | } |
373 | | #if (LZO_OPT_UNALIGNED32) && (LZO_ABI_LITTLE_ENDIAN) |
374 | 1.51M | #define LZO_MEMOPS_GET_LE32(ss) (* (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss)) |
375 | | #else |
376 | | #define LZO_MEMOPS_GET_LE32(ss) lzo_memops_get_le32(ss) |
377 | | #endif |
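When the target cannot do unaligned little-endian loads, both getters fall back to assembling the value from individual bytes. The fallback paths are equivalent to the following portable, endianness-independent reads (a standalone restatement for reference, not the library's code):

#include <stdint.h>

/* Portable little-endian reads, matching the byte-shift fallbacks above. */
static uint16_t get_le16(const void *p)
{
    const unsigned char *s = (const unsigned char *) p;
    return (uint16_t) (s[0] | (s[1] << 8));
}

static uint32_t get_le32(const void *p)
{
    const unsigned char *s = (const unsigned char *) p;
    return (uint32_t) s[0] | ((uint32_t) s[1] << 8) |
           ((uint32_t) s[2] << 16) | ((uint32_t) s[3] << 24);
}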
378 | | |
379 | | #if (LZO_OPT_UNALIGNED64) && (LZO_ABI_LITTLE_ENDIAN) |
380 | 2 | #define LZO_MEMOPS_GET_LE64(ss) (* (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss)) |
381 | | #endif |
382 | | |
383 | | __lzo_static_forceinline lzo_uint16_t lzo_memops_get_ne16(const lzo_voidp ss) |
384 | 0 | { |
385 | 0 | lzo_uint16_t v; |
386 | 0 | LZO_MEMOPS_COPY2(&v, ss); |
387 | 0 | return v; |
388 | 0 | } |
389 | | #if (LZO_OPT_UNALIGNED16) |
390 | | LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU2p)0)==2) |
391 | 2 | #define LZO_MEMOPS_GET_NE16(ss) (* (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss)) |
392 | | #else |
393 | | #define LZO_MEMOPS_GET_NE16(ss) lzo_memops_get_ne16(ss) |
394 | | #endif |
395 | | |
396 | | __lzo_static_forceinline lzo_uint32_t lzo_memops_get_ne32(const lzo_voidp ss) |
397 | 0 | { |
398 | 0 | lzo_uint32_t v; |
399 | 0 | LZO_MEMOPS_COPY4(&v, ss); |
400 | 0 | return v; |
401 | 0 | } |
402 | | #if (LZO_OPT_UNALIGNED32) |
403 | | LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU4p)0)==4) |
404 | 2 | #define LZO_MEMOPS_GET_NE32(ss) (* (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss)) |
405 | | #else |
406 | | #define LZO_MEMOPS_GET_NE32(ss) lzo_memops_get_ne32(ss) |
407 | | #endif |
408 | | |
409 | | #if (LZO_OPT_UNALIGNED64) |
410 | | LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU8p)0)==8) |
411 | 2.61M | #define LZO_MEMOPS_GET_NE64(ss) (* (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss)) |
412 | | #endif |
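The NE ("native endian") accessors read a host-order value from a possibly unaligned address, either directly or via LZO_MEMOPS_COPY2/COPY4/COPY8. In portable C the same effect is usually obtained with a fixed-size memcpy, which current compilers reduce to a single load (a sketch under that assumption, not the library's implementation):

#include <stdint.h>
#include <string.h>

/* Host-order unaligned loads via memcpy; modern compilers emit one mov/ldr. */
static uint32_t get_ne32(const void *p) { uint32_t v; memcpy(&v, p, sizeof v); return v; }
static uint64_t get_ne64(const void *p) { uint64_t v; memcpy(&v, p, sizeof v); return v; }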
413 | | |
414 | | __lzo_static_forceinline void lzo_memops_put_le16(lzo_voidp dd, lzo_uint16_t vv) |
415 | 0 | { |
416 | 0 | #if (LZO_ABI_LITTLE_ENDIAN) |
417 | 0 | LZO_MEMOPS_COPY2(dd, &vv); |
418 | 0 | #elif (LZO_OPT_UNALIGNED16 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC) |
419 | 0 | lzo_memops_TU2p d = (lzo_memops_TU2p) dd; |
420 | 0 | unsigned long v = vv; |
421 | 0 | __asm__("sthbrx %2,0,%1" : "=m" (*d) : "r" (d), "r" (v)); |
422 | 0 | #else |
423 | 0 | lzo_memops_TU1p d = (lzo_memops_TU1p) dd; |
424 | 0 | d[0] = LZO_BYTE((vv ) & 0xff); |
425 | 0 | d[1] = LZO_BYTE((vv >> 8) & 0xff); |
426 | 0 | #endif |
427 | 0 | } |
428 | | #if (LZO_OPT_UNALIGNED16) && (LZO_ABI_LITTLE_ENDIAN) |
429 | | #define LZO_MEMOPS_PUT_LE16(dd,vv) (* (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = (vv)) |
430 | | #else |
431 | | #define LZO_MEMOPS_PUT_LE16(dd,vv) lzo_memops_put_le16(dd,vv) |
432 | | #endif |
433 | | |
434 | | __lzo_static_forceinline void lzo_memops_put_le32(lzo_voidp dd, lzo_uint32_t vv) |
435 | 0 | { |
436 | 0 | #if (LZO_ABI_LITTLE_ENDIAN) |
437 | 0 | LZO_MEMOPS_COPY4(dd, &vv); |
438 | 0 | #elif (LZO_OPT_UNALIGNED32 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC) |
439 | 0 | lzo_memops_TU4p d = (lzo_memops_TU4p) dd; |
440 | 0 | unsigned long v = vv; |
441 | 0 | __asm__("stwbrx %2,0,%1" : "=m" (*d) : "r" (d), "r" (v)); |
442 | 0 | #else |
443 | 0 | lzo_memops_TU1p d = (lzo_memops_TU1p) dd; |
444 | 0 | d[0] = LZO_BYTE((vv ) & 0xff); |
445 | 0 | d[1] = LZO_BYTE((vv >> 8) & 0xff); |
446 | 0 | d[2] = LZO_BYTE((vv >> 16) & 0xff); |
447 | 0 | d[3] = LZO_BYTE((vv >> 24) & 0xff); |
448 | 0 | #endif |
449 | 0 | } |
450 | | #if (LZO_OPT_UNALIGNED32) && (LZO_ABI_LITTLE_ENDIAN) |
451 | | #define LZO_MEMOPS_PUT_LE32(dd,vv) (* (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = (vv)) |
452 | | #else |
453 | | #define LZO_MEMOPS_PUT_LE32(dd,vv) lzo_memops_put_le32(dd,vv) |
454 | | #endif |
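The put routines mirror the getters: a direct store on unaligned-capable little-endian targets, byte-by-byte stores otherwise. A portable byte-wise equivalent of the little-endian stores (standalone sketch, not the library's code):

#include <stdint.h>

/* Endianness-independent little-endian stores, matching the byte-wise fallback. */
static void put_le16(void *p, uint16_t v)
{
    unsigned char *d = (unsigned char *) p;
    d[0] = (unsigned char) (v & 0xff);
    d[1] = (unsigned char) ((v >> 8) & 0xff);
}

static void put_le32(void *p, uint32_t v)
{
    unsigned char *d = (unsigned char *) p;
    d[0] = (unsigned char) (v & 0xff);
    d[1] = (unsigned char) ((v >> 8) & 0xff);
    d[2] = (unsigned char) ((v >> 16) & 0xff);
    d[3] = (unsigned char) ((v >> 24) & 0xff);
}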
455 | | |
456 | | __lzo_static_forceinline void lzo_memops_put_ne16(lzo_voidp dd, lzo_uint16_t vv) |
457 | 0 | { |
458 | 0 | LZO_MEMOPS_COPY2(dd, &vv); |
459 | 0 | } |
460 | | #if (LZO_OPT_UNALIGNED16) |
461 | | #define LZO_MEMOPS_PUT_NE16(dd,vv) (* (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = (vv)) |
462 | | #else |
463 | | #define LZO_MEMOPS_PUT_NE16(dd,vv) lzo_memops_put_ne16(dd,vv) |
464 | | #endif |
465 | | |
466 | | __lzo_static_forceinline void lzo_memops_put_ne32(lzo_voidp dd, lzo_uint32_t vv) |
467 | 0 | { |
468 | 0 | LZO_MEMOPS_COPY4(dd, &vv); |
469 | 0 | } |
470 | | #if (LZO_OPT_UNALIGNED32) |
471 | | #define LZO_MEMOPS_PUT_NE32(dd,vv) (* (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = (vv)) |
472 | | #else |
473 | | #define LZO_MEMOPS_PUT_NE32(dd,vv) lzo_memops_put_ne32(dd,vv) |
474 | | #endif |
475 | | |
476 | | lzo_unused_funcs_impl(void, lzo_memops_unused_funcs)(void) |
477 | 0 | { |
478 | 0 | LZO_UNUSED_FUNC(lzo_memops_unused_funcs); |
479 | 0 | LZO_UNUSED_FUNC(lzo_memops_get_le16); |
480 | 0 | LZO_UNUSED_FUNC(lzo_memops_get_le32); |
481 | 0 | LZO_UNUSED_FUNC(lzo_memops_get_ne16); |
482 | 0 | LZO_UNUSED_FUNC(lzo_memops_get_ne32); |
483 | 0 | LZO_UNUSED_FUNC(lzo_memops_put_le16); |
484 | 0 | LZO_UNUSED_FUNC(lzo_memops_put_le32); |
485 | 0 | LZO_UNUSED_FUNC(lzo_memops_put_ne16); |
486 | 0 | LZO_UNUSED_FUNC(lzo_memops_put_ne32); |
487 | 0 | } |
488 | | |
489 | | #endif /* already included */ |
490 | | |
491 | | /* vim:set ts=4 sw=4 et: */ |