Coverage Report

Created: 2026-02-14 07:09

/src/ghostpdl/base/gsroprun1.h
Line
Count
Source
1
/* Copyright (C) 2001-2023 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  39 Mesa Street, Suite 108A, San Francisco,
13
   CA 94129, USA, for further information.
14
*/
15
16
17
/* This file is repeatedly included by gsroprun.c to 'autogenerate' many
18
 * different versions of roprun code. DO NOT USE THIS FILE EXCEPT FROM
19
 * gsroprun.c.
20
 */
21
22
/* Set the following defines as appropriate on entry:
23
 *   TEMPLATE_NAME (Compulsory)  The name of the function to generate
24
 *   SPECIFIC_ROP  (Optional)    If set, the function will base its decision
25
 *                               about whether to provide S and T upon
26
 *                               this value.
27
 *   SPECIFIC_CODE (Optional)    If set, this should expand out to code to
28
 *                               perform the rop. Will be invoked as:
29
 *                               SPECIFIC_CODE(OUT,D,S,T)
30
 *   S_CONST       (Optional)    If set, S will be taken to be constant, else
31
 *                               S will be read from a pointer.
32
 *   T_CONST       (Optional)    If set, T will be taken to be constant, else
33
 *                               T will be read from a pointer.
34
 */
35
36
#if defined(TEMPLATE_NAME)
37
38
#ifdef SPECIFIC_ROP
39
#if rop3_uses_S(SPECIFIC_ROP)
40
#define S_USED
41
#endif
42
#if rop3_uses_T(SPECIFIC_ROP)
43
#define T_USED
44
#endif
45
#else /* !SPECIFIC_ROP */
46
#define S_USED
47
#define T_USED
48
#endif /* SPECIFIC_ROP */
49
50
/* We work in 'chunks' here; for bigendian machines, we can safely use
51
 * chunks of 'int' size. For little endian machines where we have a cheap
52
 * endian swap, we can do likewise. For others, we'll work at the byte
53
 * level. */
54
#if !ARCH_IS_BIG_ENDIAN && !defined(ENDIAN_SWAP_INT)
55
#define CHUNKSIZE 8
56
#define CHUNK byte
57
#define CHUNKONES 255
58
59
#define ADJUST_TO_CHUNK(d,dpos) do {} while (0)
60
61
#else /* ARCH_IS_BIG_ENDIAN || defined(ENDIAN_SWAP_INT) */
62
#if ARCH_LOG2_SIZEOF_INT == 2
63
1.43G
#define CHUNKSIZE 32
64
227M
#define CHUNK unsigned int
65
111M
#define CHUNKONES 0xFFFFFFFFU
66
67
#if ARCH_SIZEOF_PTR == (1<<ARCH_LOG2_SIZEOF_INT)
68
#define ROP_PTRDIFF_T int
69
#else
70
#define ROP_PTRDIFF_T int64_t
71
#endif
72
#define ADJUST_TO_CHUNK(d, dpos)                      \
73
111M
    do { int offset = ((ROP_PTRDIFF_T)d) & ((CHUNKSIZE>>3)-1);  \
74
111M
         d = (CHUNK *)(void *)(((byte *)(void *)d)-offset);   \
75
111M
         dpos += offset<<3;                           \
76
111M
     } while (0)
77
#else
78
/* FIXME: Write more code in here when we find an example. */
79
#endif
80
#endif /* ARCH_IS_BIG_ENDIAN || defined(ENDIAN_SWAP_INT) */
81
82
/* We define an 'RE' macro that reverses the endianness of a chunk, if we
83
 * need it, and does nothing otherwise. */
84
#if !ARCH_IS_BIG_ENDIAN && defined(ENDIAN_SWAP_INT) && (CHUNKSIZE != 8)
85
682M
#define RE(I) ((CHUNK)ENDIAN_SWAP_INT(I))
86
#else /* ARCH_IS_BIG_ENDIAN || !defined(ENDIAN_SWAP_INT) || (CHUNKSIZE == 8) */
87
#define RE(I) (I)
88
#endif /* ARCH_IS_BIG_ENDIAN || !defined(ENDIAN_SWAP_INT) || (CHUNKSIZE == 8) */
89
90
/* In some cases we will need to fetch values from a pointer, and 'skew'
91
 * them. We need 2 variants of this macro. One that is 'SAFE' to use when
92
 * SKEW might be 0, and one that can be faster, because we know that SKEW
93
 * is non zero. */
94
#define SKEW_FETCH(S,s,SKEW) \
95
469M
    do { S = RE((RE(s[0])<<SKEW) | (RE(s[1])>>(CHUNKSIZE-SKEW))); s++; } while (0)
96
#define SAFE_SKEW_FETCH(S,s,SKEW,L,R)                                    \
97
101M
    do { S = RE(((L) ? 0 : (RE(s[0])<<SKEW)) | ((R) ? 0 : (RE(s[1])>>(CHUNKSIZE-SKEW)))); s++; } while (0)
98
99
#if defined(S_USED) && !defined(S_CONST)
100
#define S_SKEW
101
469M
#define FETCH_S           SKEW_FETCH(S,s,s_skew)
102
101M
#define SAFE_FETCH_S(L,R) SAFE_SKEW_FETCH(S,s,s_skew,L,R)
103
#else /* !defined(S_USED) || defined(S_CONST) */
104
#define FETCH_S
105
#define SAFE_FETCH_S(L,R)
106
#endif /* !defined(S_USED) || defined(S_CONST) */
107
108
#if defined(T_USED) && !defined(T_CONST)
109
#define T_SKEW
110
0
#define FETCH_T           SKEW_FETCH(T,t,t_skew)
111
0
#define SAFE_FETCH_T(L,R) SAFE_SKEW_FETCH(T,t,t_skew,L,R)
112
#else /* !defined(T_USED) || defined(T_CONST) */
113
#define FETCH_T
114
#define SAFE_FETCH_T(L,R)
115
#endif /* !defined(T_USED) || defined(T_CONST) */
116
117
static void TEMPLATE_NAME(rop_run_op *op, byte *d_, int len)
118
55.8M
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
20.9M
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
55.8M
    CHUNK        lmask, rmask;
124
#ifdef S_USED
125
#ifdef S_CONST
126
0
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
55.8M
    CHUNK        S;
130
    int          s_skew;
131
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
4.49M
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
0
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
#define T 0
146
#undef T_CONST
147
#endif /* !defined(T_USED) */
148
#if defined(S_SKEW) || defined(T_SKEW)
149
    int skewflags = 0;
150
#endif
151
55.8M
    CHUNK        D;
152
55.8M
    int          dpos = op->dpos;
153
55.8M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
55.8M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
55.8M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
55.8M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
55.8M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
55.8M
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
4.49M
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
4.49M
#ifdef T_CONST
178
4.49M
        T |= T<<1;
179
4.49M
#endif /* !defined(T_CONST) */
180
4.49M
    }
181
4.49M
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
4.49M
#ifdef T_CONST
186
4.49M
        T |= T<<2;
187
4.49M
#endif /* !defined(T_CONST) */
188
4.49M
    }
189
4.49M
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
4.49M
#ifdef T_CONST
194
4.49M
        T |= T<<4;
195
4.49M
#endif /* !defined(T_CONST) */
196
4.49M
    }
197
#if CHUNKSIZE > 8
198
4.49M
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
4.49M
#ifdef T_CONST
203
4.49M
        T |= T<<8;
204
4.49M
#endif /* !defined(T_CONST) */
205
4.49M
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
4.49M
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
4.49M
#ifdef T_CONST
213
4.49M
        T |= T<<16;
214
4.49M
#endif /* !defined(T_CONST) */
215
4.49M
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
#ifdef S_SKEW
226
    {
227
        int slen, slen2;
228
        int spos = op->s.b.pos;
229
55.8M
        ADJUST_TO_CHUNK(s, spos);
230
        s_skew = spos - dpos;
231
55.8M
        if (s_skew < 0) {
232
40.9M
            s_skew += CHUNKSIZE;
233
40.9M
            s--;
234
40.9M
            skewflags |= 1; /* Suppress reading off left edge */
235
40.9M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + spos.
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+spos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * this is larger, then suppress. */
240
55.8M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
55.8M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
55.8M
        if ((s_skew == 0) || (slen < slen2)) {
243
30.9M
            skewflags |= 4; /* Suppress reading off the right edge */
244
30.9M
        }
245
    }
246
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
0
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
0
        if (t_skew < 0) {
254
0
            t_skew += CHUNKSIZE;
255
0
            t--;
256
0
            skewflags |= 2; /* Suppress reading off left edge */
257
0
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * this is larger, then suppress. */
262
0
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
0
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
0
        if ((t_skew == 0) || (tlen < tlen2)) {
265
0
            skewflags |= 8; /* Suppress reading off the right edge */
266
0
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
55.8M
    len -= CHUNKSIZE; /* len = bits to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
55.8M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
259k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
259k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
259k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
259k
        SPECIFIC_CODE(D, *d, S, T);
278
259k
        *d = (*d & ~lmask) | (D & lmask);
279
259k
        return;
280
259k
    }
281
55.5M
    if ((lmask != CHUNKONES)
282
#if defined(S_SKEW) || defined(T_SKEW)
283
10.0M
        || (skewflags & 3)
284
#endif
285
55.5M
        ) {
286
        /* Unaligned left hand case */
287
45.5M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
45.5M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
45.5M
        SPECIFIC_CODE(D, *d, S, T);
290
45.5M
        *d = (*d & ~lmask) | (D & lmask);
291
45.5M
        d++;
292
45.5M
        len -= CHUNKSIZE;
293
45.5M
    }
294
55.5M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
#ifdef S_SKEW
297
40.9M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
0
            if (t_skew == 0) {
300
0
                do {
301
0
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
0
                    d++;
303
0
                    len -= CHUNKSIZE;
304
0
                } while (len > 0);
305
0
            } else
306
0
#endif /* !defined(T_SKEW) */
307
0
            {
308
428M
                do {
309
428M
                    FETCH_T;
310
428M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
428M
                    d++;
312
428M
                    len -= CHUNKSIZE;
313
428M
                } while (len > 0);
314
0
            }
315
10.2M
        } else
316
30.6M
#endif /* !defined(S_SKEW) */
317
30.6M
        {
318
#ifdef T_SKEW
319
0
            if (t_skew == 0) {
320
0
                do {
321
0
                    FETCH_S;
322
0
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
0
                    d++;
324
0
                    len -= CHUNKSIZE;
325
0
                } while (len > 0);
326
0
            } else
327
0
#endif /* !defined(T_SKEW) */
328
0
            {
329
469M
                do {
330
469M
                    FETCH_S;
331
469M
                    FETCH_T;
332
469M
                    SPECIFIC_CODE(*d, *d, S, T);
333
469M
                    d++;
334
469M
                    len -= CHUNKSIZE;
335
469M
                } while (len > 0);
336
0
            }
337
30.6M
        }
338
40.9M
    }
339
    /* Unaligned right hand case */
340
55.5M
    SAFE_FETCH_S(0,skewflags & 4);
341
55.5M
    SAFE_FETCH_T(0,skewflags & 8);
342
55.5M
    SPECIFIC_CODE(D, *d, S, T);
343
55.5M
    *d = (*d & rmask) | (D & ~rmask);
344
55.5M
}
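The function above is a template: gsroprun.c sets the control defines described in the header comment (source lines 22-34) and #includes this file once per variant, which is why separate per-instantiation counts follow below. A minimal sketch of that pattern, with hypothetical names and a hypothetical rop value (the real instantiations in this report, such as sets_rop_run1 and generic_rop_run1_const_t, are generated the same way, but their exact defines are not shown here):

    /* Illustrative instantiation only -- the demo_* names and the 0x55 rop
     * code (a rop that depends on D alone, i.e. invert) are assumptions,
     * not values read from this report. */
    #define TEMPLATE_NAME  demo_invert_rop_run1
    #define SPECIFIC_ROP   0x55          /* uses neither S nor T, so S_USED/T_USED stay undefined */
    #define SPECIFIC_CODE(OUT_,D_,S_,T_) OUT_ = ~(D_)
    #include "gsroprun1.h"               /* emits one specialised run function */

    #define TEMPLATE_NAME  demo_rop_run1_const_t
    #define T_CONST        1             /* texture arrives as a constant in op->t.c */
    #include "gsroprun1.h"               /* no SPECIFIC_ROP/CODE: falls back to rop_proc_table */

The #undef block at source lines 346-368 clears every control define after each expansion, which is what makes this repeated inclusion possible.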
gsroprun.c:notS_rop_run1_const_t
Line
Count
Source
118
142k
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
142k
    CHUNK        lmask, rmask;
124
142k
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
142k
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
142k
    CHUNK        S;
130
142k
    int          s_skew;
131
142k
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
142k
#define T 0
146
142k
#undef T_CONST
147
142k
#endif /* !defined(T_USED) */
148
142k
#if defined(S_SKEW) || defined(T_SKEW)
149
142k
    int skewflags = 0;
150
142k
#endif
151
142k
    CHUNK        D;
152
142k
    int          dpos = op->dpos;
153
142k
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
142k
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
142k
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
142k
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
142k
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
142k
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
#ifdef T_CONST
178
        T |= T<<1;
179
#endif /* !defined(T_CONST) */
180
    }
181
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
#ifdef T_CONST
186
        T |= T<<2;
187
#endif /* !defined(T_CONST) */
188
    }
189
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
#ifdef T_CONST
194
        T |= T<<4;
195
#endif /* !defined(T_CONST) */
196
    }
197
#if CHUNKSIZE > 8
198
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
#ifdef T_CONST
203
        T |= T<<8;
204
#endif /* !defined(T_CONST) */
205
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
#ifdef T_CONST
213
        T |= T<<16;
214
#endif /* !defined(T_CONST) */
215
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
142k
#ifdef S_SKEW
226
142k
    {
227
142k
        int slen, slen2;
228
142k
        int spos = op->s.b.pos;
229
142k
        ADJUST_TO_CHUNK(s, spos);
230
142k
        s_skew = spos - dpos;
231
142k
        if (s_skew < 0) {
232
92.2k
            s_skew += CHUNKSIZE;
233
92.2k
            s--;
234
92.2k
            skewflags |= 1; /* Suppress reading off left edge */
235
92.2k
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + spos.
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+spos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * this is larger, then suppress. */
240
142k
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
142k
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
142k
        if ((s_skew == 0) || (slen < slen2)) {
243
123k
            skewflags |= 4; /* Suppress reading off the right edge */
244
123k
        }
245
142k
    }
246
142k
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * this is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
142k
    len -= CHUNKSIZE; /* len = bits to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
142k
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
3
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
3
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
3
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
3
        SPECIFIC_CODE(D, *d, S, T);
278
3
        *d = (*d & ~lmask) | (D & lmask);
279
3
        return;
280
3
    }
281
142k
    if ((lmask != CHUNKONES)
282
22.0k
#if defined(S_SKEW) || defined(T_SKEW)
283
22.0k
        || (skewflags & 3)
284
142k
#endif
285
142k
        ) {
286
        /* Unaligned left hand case */
287
120k
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
120k
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
120k
        SPECIFIC_CODE(D, *d, S, T);
290
120k
        *d = (*d & ~lmask) | (D & lmask);
291
120k
        d++;
292
120k
        len -= CHUNKSIZE;
293
120k
    }
294
142k
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
142k
#ifdef S_SKEW
297
142k
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
44.0k
            {
308
1.30M
                do {
309
1.30M
                    FETCH_T;
310
1.30M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
1.30M
                    d++;
312
1.30M
                    len -= CHUNKSIZE;
313
1.30M
                } while (len > 0);
314
44.0k
            }
315
44.0k
        } else
316
98.2k
#endif /* !defined(S_SKEW) */
317
98.2k
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
98.2k
            {
329
991k
                do {
330
991k
                    FETCH_S;
331
991k
                    FETCH_T;
332
991k
                    SPECIFIC_CODE(*d, *d, S, T);
333
991k
                    d++;
334
991k
                    len -= CHUNKSIZE;
335
991k
                } while (len > 0);
336
98.2k
            }
337
98.2k
        }
338
142k
    }
339
    /* Unaligned right hand case */
340
142k
    SAFE_FETCH_S(0,skewflags & 4);
341
142k
    SAFE_FETCH_T(0,skewflags & 8);
342
142k
    SPECIFIC_CODE(D, *d, S, T);
343
142k
    *d = (*d & rmask) | (D & ~rmask);
344
142k
}
Unexecuted instantiation: gsroprun.c:invert_rop_run1
Unexecuted instantiation: gsroprun.c:xor_rop_run1_const_t
gsroprun.c:sets_rop_run1
Line
Count
Source
118
46.0M
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
46.0M
    CHUNK        lmask, rmask;
124
46.0M
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
46.0M
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
46.0M
    CHUNK        S;
130
46.0M
    int          s_skew;
131
46.0M
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
46.0M
#define T 0
146
46.0M
#undef T_CONST
147
46.0M
#endif /* !defined(T_USED) */
148
46.0M
#if defined(S_SKEW) || defined(T_SKEW)
149
46.0M
    int skewflags = 0;
150
46.0M
#endif
151
46.0M
    CHUNK        D;
152
46.0M
    int          dpos = op->dpos;
153
46.0M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
46.0M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
46.0M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
46.0M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
46.0M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
46.0M
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
#ifdef T_CONST
178
        T |= T<<1;
179
#endif /* !defined(T_CONST) */
180
    }
181
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
#ifdef T_CONST
186
        T |= T<<2;
187
#endif /* !defined(T_CONST) */
188
    }
189
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
#ifdef T_CONST
194
        T |= T<<4;
195
#endif /* !defined(T_CONST) */
196
    }
197
#if CHUNKSIZE > 8
198
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
#ifdef T_CONST
203
        T |= T<<8;
204
#endif /* !defined(T_CONST) */
205
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
#ifdef T_CONST
213
        T |= T<<16;
214
#endif /* !defined(T_CONST) */
215
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
46.0M
#ifdef S_SKEW
226
46.0M
    {
227
46.0M
        int slen, slen2;
228
46.0M
        int spos = op->s.b.pos;
229
46.0M
        ADJUST_TO_CHUNK(s, spos);
230
46.0M
        s_skew = spos - dpos;
231
46.0M
        if (s_skew < 0) {
232
32.0M
            s_skew += CHUNKSIZE;
233
32.0M
            s--;
234
32.0M
            skewflags |= 1; /* Suppress reading off left edge */
235
32.0M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + spos.
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+spos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * this is larger, then suppress. */
240
46.0M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
46.0M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
46.0M
        if ((s_skew == 0) || (slen < slen2)) {
243
26.0M
            skewflags |= 4; /* Suppress reading off the right edge */
244
26.0M
        }
245
46.0M
    }
246
46.0M
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * this is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
46.0M
    len -= CHUNKSIZE; /* len = bits to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
46.0M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
244k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
244k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
244k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
244k
        SPECIFIC_CODE(D, *d, S, T);
278
244k
        *d = (*d & ~lmask) | (D & lmask);
279
244k
        return;
280
244k
    }
281
45.8M
    if ((lmask != CHUNKONES)
282
9.47M
#if defined(S_SKEW) || defined(T_SKEW)
283
9.47M
        || (skewflags & 3)
284
45.8M
#endif
285
45.8M
        ) {
286
        /* Unaligned left hand case */
287
36.3M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
36.3M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
36.3M
        SPECIFIC_CODE(D, *d, S, T);
290
36.3M
        *d = (*d & ~lmask) | (D & lmask);
291
36.3M
        d++;
292
36.3M
        len -= CHUNKSIZE;
293
36.3M
    }
294
45.8M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
34.8M
#ifdef S_SKEW
297
34.8M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
9.74M
            {
308
422M
                do {
309
422M
                    FETCH_T;
310
422M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
422M
                    d++;
312
422M
                    len -= CHUNKSIZE;
313
422M
                } while (len > 0);
314
9.74M
            }
315
9.74M
        } else
316
25.1M
#endif /* !defined(S_SKEW) */
317
25.1M
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
25.1M
            {
329
451M
                do {
330
451M
                    FETCH_S;
331
451M
                    FETCH_T;
332
451M
                    SPECIFIC_CODE(*d, *d, S, T);
333
451M
                    d++;
334
451M
                    len -= CHUNKSIZE;
335
451M
                } while (len > 0);
336
25.1M
            }
337
25.1M
        }
338
34.8M
    }
339
    /* Unaligned right hand case */
340
45.8M
    SAFE_FETCH_S(0,skewflags & 4);
341
45.8M
    SAFE_FETCH_T(0,skewflags & 8);
342
45.8M
    SPECIFIC_CODE(D, *d, S, T);
343
45.8M
    *d = (*d & rmask) | (D & ~rmask);
344
45.8M
}
gsroprun.c:dors_rop_run1_const_t
Line
Count
Source
118
5.07M
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
5.07M
    CHUNK        lmask, rmask;
124
5.07M
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
5.07M
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
5.07M
    CHUNK        S;
130
5.07M
    int          s_skew;
131
5.07M
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
5.07M
#define T 0
146
5.07M
#undef T_CONST
147
5.07M
#endif /* !defined(T_USED) */
148
5.07M
#if defined(S_SKEW) || defined(T_SKEW)
149
5.07M
    int skewflags = 0;
150
5.07M
#endif
151
5.07M
    CHUNK        D;
152
5.07M
    int          dpos = op->dpos;
153
5.07M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
5.07M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
5.07M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
5.07M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
5.07M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
5.07M
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
#ifdef T_CONST
178
        T |= T<<1;
179
#endif /* !defined(T_CONST) */
180
    }
181
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
#ifdef T_CONST
186
        T |= T<<2;
187
#endif /* !defined(T_CONST) */
188
    }
189
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
#ifdef T_CONST
194
        T |= T<<4;
195
#endif /* !defined(T_CONST) */
196
    }
197
#if CHUNKSIZE > 8
198
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
#ifdef T_CONST
203
        T |= T<<8;
204
#endif /* !defined(T_CONST) */
205
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
#ifdef T_CONST
213
        T |= T<<16;
214
#endif /* !defined(T_CONST) */
215
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
5.07M
#ifdef S_SKEW
226
5.07M
    {
227
5.07M
        int slen, slen2;
228
5.07M
        int spos = op->s.b.pos;
229
5.07M
        ADJUST_TO_CHUNK(s, spos);
230
5.07M
        s_skew = spos - dpos;
231
5.07M
        if (s_skew < 0) {
232
4.79M
            s_skew += CHUNKSIZE;
233
4.79M
            s--;
234
4.79M
            skewflags |= 1; /* Suppress reading off left edge */
235
4.79M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + spos.
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+spos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * this is larger, then suppress. */
240
5.07M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
5.07M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
5.07M
        if ((s_skew == 0) || (slen < slen2)) {
243
2.36M
            skewflags |= 4; /* Suppress reading off the right edge */
244
2.36M
        }
245
5.07M
    }
246
5.07M
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * this is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
5.07M
    len -= CHUNKSIZE; /* len = bits to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
5.07M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
9.92k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
9.92k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
9.92k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
9.92k
        SPECIFIC_CODE(D, *d, S, T);
278
9.92k
        *d = (*d & ~lmask) | (D & lmask);
279
9.92k
        return;
280
9.92k
    }
281
5.06M
    if ((lmask != CHUNKONES)
282
232k
#if defined(S_SKEW) || defined(T_SKEW)
283
232k
        || (skewflags & 3)
284
5.06M
#endif
285
5.06M
        ) {
286
        /* Unaligned left hand case */
287
4.83M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
4.83M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
4.83M
        SPECIFIC_CODE(D, *d, S, T);
290
4.83M
        *d = (*d & ~lmask) | (D & lmask);
291
4.83M
        d++;
292
4.83M
        len -= CHUNKSIZE;
293
4.83M
    }
294
5.06M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
3.04M
#ifdef S_SKEW
297
3.04M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
156k
            {
308
1.16M
                do {
309
1.16M
                    FETCH_T;
310
1.16M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
1.16M
                    d++;
312
1.16M
                    len -= CHUNKSIZE;
313
1.16M
                } while (len > 0);
314
156k
            }
315
156k
        } else
316
2.89M
#endif /* !defined(S_SKEW) */
317
2.89M
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
2.89M
            {
329
7.82M
                do {
330
7.82M
                    FETCH_S;
331
7.82M
                    FETCH_T;
332
7.82M
                    SPECIFIC_CODE(*d, *d, S, T);
333
7.82M
                    d++;
334
7.82M
                    len -= CHUNKSIZE;
335
7.82M
                } while (len > 0);
336
2.89M
            }
337
2.89M
        }
338
3.04M
    }
339
    /* Unaligned right hand case */
340
5.06M
    SAFE_FETCH_S(0,skewflags & 4);
341
5.06M
    SAFE_FETCH_T(0,skewflags & 8);
342
5.06M
    SPECIFIC_CODE(D, *d, S, T);
343
5.06M
    *d = (*d & rmask) | (D & ~rmask);
344
5.06M
}
Unexecuted instantiation: gsroprun.c:generic_rop_run1
gsroprun.c:generic_rop_run1_const_t
Line
Count
Source
118
4.49M
{
119
4.49M
#ifndef SPECIFIC_CODE
120
4.49M
    rop_proc     proc = rop_proc_table[op->rop];
121
4.49M
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
4.49M
#endif /* !defined(SPECIFIC_CODE) */
123
4.49M
    CHUNK        lmask, rmask;
124
4.49M
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
4.49M
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
4.49M
    CHUNK        S;
130
4.49M
    int          s_skew;
131
4.49M
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
4.49M
#ifdef T_USED
137
4.49M
#ifdef T_CONST
138
4.49M
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
#define T 0
146
#undef T_CONST
147
#endif /* !defined(T_USED) */
148
4.49M
#if defined(S_SKEW) || defined(T_SKEW)
149
4.49M
    int skewflags = 0;
150
4.49M
#endif
151
4.49M
    CHUNK        D;
152
4.49M
    int          dpos = op->dpos;
153
4.49M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
4.49M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
4.49M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
4.49M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
4.49M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
4.49M
    if (rmask == CHUNKONES) rmask = 0;
169
170
4.49M
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
4.49M
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
4.49M
#ifdef T_CONST
178
4.49M
        T |= T<<1;
179
4.49M
#endif /* !defined(T_CONST) */
180
4.49M
    }
181
4.49M
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
4.49M
#ifdef T_CONST
186
4.49M
        T |= T<<2;
187
4.49M
#endif /* !defined(T_CONST) */
188
4.49M
    }
189
4.49M
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
4.49M
#ifdef T_CONST
194
4.49M
        T |= T<<4;
195
4.49M
#endif /* !defined(T_CONST) */
196
4.49M
    }
197
4.49M
#if CHUNKSIZE > 8
198
4.49M
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
4.49M
#ifdef T_CONST
203
4.49M
        T |= T<<8;
204
4.49M
#endif /* !defined(T_CONST) */
205
4.49M
    }
206
4.49M
#endif /* CHUNKSIZE > 8 */
207
4.49M
#if CHUNKSIZE > 16
208
4.49M
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
4.49M
#ifdef T_CONST
213
4.49M
        T |= T<<16;
214
4.49M
#endif /* !defined(T_CONST) */
215
4.49M
    }
216
4.49M
#endif /* CHUNKSIZE > 16 */
217
4.49M
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
4.49M
#ifdef S_SKEW
226
4.49M
    {
227
4.49M
        int slen, slen2;
228
4.49M
        int spos = op->s.b.pos;
229
4.49M
        ADJUST_TO_CHUNK(s, spos);
230
4.49M
        s_skew = spos - dpos;
231
4.49M
        if (s_skew < 0) {
232
4.08M
            s_skew += CHUNKSIZE;
233
4.08M
            s--;
234
4.08M
            skewflags |= 1; /* Suppress reading off left edge */
235
4.08M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + spos.
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+spos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * this is larger, then suppress. */
240
4.49M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
4.49M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
4.49M
        if ((s_skew == 0) || (slen < slen2)) {
243
2.32M
            skewflags |= 4; /* Suppress reading off the right edge */
244
2.32M
        }
245
4.49M
    }
246
4.49M
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * this is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
4.49M
    len -= CHUNKSIZE; /* len = bits to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
4.49M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
5.06k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
5.06k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
5.06k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
5.06k
        SPECIFIC_CODE(D, *d, S, T);
278
5.06k
        *d = (*d & ~lmask) | (D & lmask);
279
5.06k
        return;
280
5.06k
    }
281
4.48M
    if ((lmask != CHUNKONES)
282
274k
#if defined(S_SKEW) || defined(T_SKEW)
283
274k
        || (skewflags & 3)
284
4.48M
#endif
285
4.48M
        ) {
286
        /* Unaligned left hand case */
287
4.21M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
4.21M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
4.21M
        SPECIFIC_CODE(D, *d, S, T);
290
4.21M
        *d = (*d & ~lmask) | (D & lmask);
291
4.21M
        d++;
292
4.21M
        len -= CHUNKSIZE;
293
4.21M
    }
294
4.48M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
2.86M
#ifdef S_SKEW
297
2.86M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
294k
            {
308
3.94M
                do {
309
3.94M
                    FETCH_T;
310
3.94M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
3.94M
                    d++;
312
3.94M
                    len -= CHUNKSIZE;
313
3.94M
                } while (len > 0);
314
294k
            }
315
294k
        } else
316
2.57M
#endif /* !defined(S_SKEW) */
317
2.57M
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
2.57M
            {
329
8.33M
                do {
330
8.33M
                    FETCH_S;
331
8.33M
                    FETCH_T;
332
8.33M
                    SPECIFIC_CODE(*d, *d, S, T);
333
8.33M
                    d++;
334
8.33M
                    len -= CHUNKSIZE;
335
8.33M
                } while (len > 0);
336
2.57M
            }
337
2.57M
        }
338
2.86M
    }
339
    /* Unaligned right hand case */
340
4.48M
    SAFE_FETCH_S(0,skewflags & 4);
341
4.48M
    SAFE_FETCH_T(0,skewflags & 8);
342
4.48M
    SPECIFIC_CODE(D, *d, S, T);
343
4.48M
    *d = (*d & rmask) | (D & ~rmask);
344
4.48M
}
Unexecuted instantiation: gsroprun.c:generic_rop_run1_const_st
345
346
#undef ADJUST_TO_CHUNK
347
#undef CHUNKSIZE
348
#undef CHUNK
349
#undef CHUNKONES
350
#undef FETCH_S
351
#undef FETCH_T
352
#undef SAFE_FETCH_S
353
#undef SAFE_FETCH_T
354
#undef RE
355
#undef S
356
#undef S_USED
357
#undef S_CONST
358
#undef S_SKEW
359
#undef SKEW_FETCH
360
#undef SAFE_SKEW_FETCH
361
#undef SPECIFIC_CODE
362
#undef SPECIFIC_ROP
363
#undef T
364
#undef T_USED
365
#undef T_CONST
366
#undef T_SKEW
367
#undef TEMPLATE_NAME
368
#undef ROP_PTRDIFF_T
369
370
#else
371
int dummy;
372
#endif
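As a worked illustration of the edge masks computed at source lines 163-168: ADJUST_TO_CHUNK (source lines 72-76) first rounds d down to a chunk boundary and folds the skipped bits into dpos, then lmask marks the bits to alter in the first chunk and rmask the bits to leave alone in the last. The standalone sketch below is not code from gsroprun.c; it assumes CHUNKSIZE == 32 and treats RE() as the identity (no byte swap).

    #include <stdio.h>
    #include <stdint.h>

    #define CHUNKSIZE 32
    #define CHUNKONES 0xFFFFFFFFu

    int main(void)
    {
        uint32_t lmask, rmask;
        int dpos  = 5;              /* run starts 5 bits into the first chunk */
        int depth = 1;              /* 1 bit per pixel                        */
        int len   = 50;             /* pixels; becomes bits, biased by dpos   */

        len   = len * depth + dpos;                     /* 55 */
        lmask = CHUNKONES >> ((CHUNKSIZE - 1) & dpos);  /* 0x07FFFFFF */
        rmask = CHUNKONES >> ((CHUNKSIZE - 1) & len);   /* 55 & 31 == 23 -> 0x000001FF */
        if (rmask == CHUNKONES)     /* run ends exactly on a chunk boundary   */
            rmask = 0;

        /* Left edge keeps the 5 bits before the run, right edge keeps the
         * 9 bits after it; destination chunk d and rop result D merge as:
         *   left : d = (d & ~lmask) | (D & lmask);
         *   right: d = (d &  rmask) | (D & ~rmask);                         */
        printf("lmask=%08X rmask=%08X\n", (unsigned)lmask, (unsigned)rmask);
        return 0;
    }

With these values the 50-pixel run covers 27 bits of the first chunk and 23 bits of the second, which is exactly what the left-edge and right-edge merge expressions in the listing preserve.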
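The skewed fetch at source lines 90-97 assembles one destination-aligned chunk of source data from two neighbouring source chunks. Once RE() drops out it reduces to two shifts and an OR; the sketch below is a standalone illustration under the same assumptions as above (32-bit chunks, no byte swapping), not code taken from gsroprun.c.

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        const uint32_t s[2] = { 0x12345678u, 0x9ABCDEF0u };
        int skew = 8;   /* source bits start 8 further into their chunk than the destination's */

        /* SKEW_FETCH(S, s, SKEW) boils down to this when no byte swap is needed: */
        uint32_t S = (s[0] << skew) | (s[1] >> (32 - skew));
        printf("S=%08X\n", (unsigned)S);    /* 3456789A */
        return 0;
    }

SAFE_SKEW_FETCH exists for the edges: when SKEW is 0 the plain form would shift by CHUNKSIZE, which is undefined in C, and its L/R arguments let the caller suppress a read that would fall outside the source row; the skewflags bits computed at source lines 225-268 are what feed those arguments.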
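Finally, the constant-colour widening at source lines 171-216 (exercised in this report by the *_const_t instantiations) doubles a depth-bit constant up until it fills a whole chunk, so S or T can be applied a chunk at a time. A standalone sketch of the same ladder, again assuming 32-bit chunks:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint32_t T = 0x5;   /* a 4-bit texture value, 0101b */
        int depth  = 4;

        if (depth & 1)  T |= T << 1;
        if (depth & 3)  T |= T << 2;
        if (depth & 7)  T |= T << 4;
        if (depth & 15) T |= T << 8;
        if (depth & 31) T |= T << 16;
        printf("T widened to %08X\n", (unsigned)T);   /* 55555555 */
        return 0;
    }

With depth == 4 the one-bit and two-bit doubling steps are skipped and 0x5 is replicated up to 0x55555555; each test simply skips the doublings that a value of that depth no longer needs, which is why a value already supplied at byte width (as the source comment allows) is handled correctly too.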