Coverage Report

Created: 2025-11-16 07:40

/src/ghostpdl/base/gsroprun1.h
Line
Count
Source
1
/* Copyright (C) 2001-2023 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  39 Mesa Street, Suite 108A, San Francisco,
13
   CA 94129, USA, for further information.
14
*/
15
16
17
/* This file is repeatedly included by gsroprun.c to 'autogenerate' many
18
 * different versions of roprun code. DO NOT USE THIS FILE EXCEPT FROM
19
 * gsroprun.c.
20
 */
21
22
/* Set the following defines as appropriate on entry:
23
 *   TEMPLATE_NAME (Compulsory)  The name of the function to generate
24
 *   SPECIFIC_ROP  (Optional)    If set, the function will base its decision
25
 *                               about whether to provide S and T upon
26
 *                               this value.
27
 *   SPECIFIC_CODE (Optional)    If set, this should expand out to code to
28
 *                               perform the rop. Will be invoked as:
29
 *                               SPECIFIC_CODE(OUT,D,S,T)
30
 *   S_CONST       (Optional)    If set, S will be taken to be constant, else
31
 *                               S will be read from a pointer.
32
 *   T_CONST       (Optional)    If set, T will be taken to be constant, else
33
 *                               T will be read from a pointer.
34
 */
35
36
#if defined(TEMPLATE_NAME)
37
38
#ifdef SPECIFIC_ROP
39
#if rop3_uses_S(SPECIFIC_ROP)
40
#define S_USED
41
#endif
42
#if rop3_uses_T(SPECIFIC_ROP)
43
#define T_USED
44
#endif
45
#else /* !SPECIFIC_ROP */
46
#define S_USED
47
#define T_USED
48
#endif /* SPECIFIC_ROP */
49
50
/* We work in 'chunks' here; for big-endian machines, we can safely use
51
 * chunks of 'int' size. For little-endian machines where we have a cheap
52
 * endian swap, we can do likewise. For others, we'll work at the byte
53
 * level. */
54
#if !ARCH_IS_BIG_ENDIAN && !defined(ENDIAN_SWAP_INT)
55
#define CHUNKSIZE 8
56
#define CHUNK byte
57
#define CHUNKONES 255
58
59
#define ADJUST_TO_CHUNK(d,dpos) do {} while (0)
60
61
#else /* ARCH_IS_BIG_ENDIAN || defined(ENDIAN_SWAP_INT) */
62
#if ARCH_LOG2_SIZEOF_INT == 2
63
2.04G
#define CHUNKSIZE 32
64
352M
#define CHUNK unsigned int
65
173M
#define CHUNKONES 0xFFFFFFFFU
66
67
#if ARCH_SIZEOF_PTR == (1<<ARCH_LOG2_SIZEOF_INT)
68
#define ROP_PTRDIFF_T int
69
#else
70
#define ROP_PTRDIFF_T int64_t
71
#endif
72
#define ADJUST_TO_CHUNK(d, dpos)                      \
73
174M
    do { int offset = ((ROP_PTRDIFF_T)d) & ((CHUNKSIZE>>3)-1);  \
74
174M
         d = (CHUNK *)(void *)(((byte *)(void *)d)-offset);   \
75
174M
         dpos += offset<<3;                           \
76
174M
     } while (0)
77
#else
78
/* FIXME: Write more code in here when we find an example. */
79
#endif
80
#endif /* ARCH_IS_BIG_ENDIAN || defined(ENDIAN_SWAP_INT) */
81
82
/* We define an 'RE' macro that reverses the endianness of a chunk, if we
83
 * need it, and does nothing otherwise. */
84
#if !ARCH_IS_BIG_ENDIAN && defined(ENDIAN_SWAP_INT) && (CHUNKSIZE != 8)
85
997M
#define RE(I) ((CHUNK)ENDIAN_SWAP_INT(I))
86
#else /* ARCH_IS_BIG_ENDIAN || !defined(ENDIAN_SWAP_INT) || (CHUNKSIZE == 8) */
87
#define RE(I) (I)
88
#endif /* ARCH_IS_BIG_ENDIAN || !defined(ENDIAN_SWAP_INT) || (CHUNKSIZE == 8) */
89
90
/* In some cases we will need to fetch values from a pointer, and 'skew'
91
 * them. We need 2 variants of this macro. One that is 'SAFE' to use when
92
 * SKEW might be 0, and one that can be faster, because we know that SKEW
93
 * is non-zero. */
94
#define SKEW_FETCH(S,s,SKEW) \
95
664M
    do { S = RE((RE(s[0])<<SKEW) | (RE(s[1])>>(CHUNKSIZE-SKEW))); s++; } while (0)
96
#define SAFE_SKEW_FETCH(S,s,SKEW,L,R)                                    \
97
158M
    do { S = RE(((L) ? 0 : (RE(s[0])<<SKEW)) | ((R) ? 0 : (RE(s[1])>>(CHUNKSIZE-SKEW)))); s++; } while (0)
98
99
#if defined(S_USED) && !defined(S_CONST)
100
#define S_SKEW
101
664M
#define FETCH_S           SKEW_FETCH(S,s,s_skew)
102
158M
#define SAFE_FETCH_S(L,R) SAFE_SKEW_FETCH(S,s,s_skew,L,R)
103
#else /* !defined(S_USED) || defined(S_CONST) */
104
#define FETCH_S
105
#define SAFE_FETCH_S(L,R)
106
#endif /* !defined(S_USED) || defined(S_CONST) */
107
108
#if defined(T_USED) && !defined(T_CONST)
109
#define T_SKEW
110
0
#define FETCH_T           SKEW_FETCH(T,t,t_skew)
111
0
#define SAFE_FETCH_T(L,R) SAFE_SKEW_FETCH(T,t,t_skew,L,R)
112
#else /* !defined(T_USED) || defined(T_CONST) */
113
#define FETCH_T
114
#define SAFE_FETCH_T(L,R)
115
#endif /* !defined(T_USED) || defined(T_CONST) */
116
117
static void TEMPLATE_NAME(rop_run_op *op, byte *d_, int len)
118
87.0M
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
22.2M
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
87.0M
    CHUNK        lmask, rmask;
124
#ifdef S_USED
125
#ifdef S_CONST
126
0
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
87.0M
    CHUNK        S;
130
    int          s_skew;
131
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
4.72M
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
0
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
#define T 0
146
#undef T_CONST
147
#endif /* !defined(T_USED) */
148
#if defined(S_SKEW) || defined(T_SKEW)
149
    int skewflags = 0;
150
#endif
151
87.0M
    CHUNK        D;
152
87.0M
    int          dpos = op->dpos;
153
87.0M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
87.0M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
87.0M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
87.0M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
87.0M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
87.0M
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
4.72M
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
4.72M
#ifdef T_CONST
178
4.72M
        T |= T<<1;
179
4.72M
#endif /* !defined(T_CONST) */
180
4.72M
    }
181
4.72M
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
4.72M
#ifdef T_CONST
186
4.72M
        T |= T<<2;
187
4.72M
#endif /* !defined(T_CONST) */
188
4.72M
    }
189
4.72M
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
4.72M
#ifdef T_CONST
194
4.72M
        T |= T<<4;
195
4.72M
#endif /* !defined(T_CONST) */
196
4.72M
    }
197
#if CHUNKSIZE > 8
198
4.72M
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
4.72M
#ifdef T_CONST
203
4.72M
        T |= T<<8;
204
4.72M
#endif /* !defined(T_CONST) */
205
4.72M
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
4.72M
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
4.72M
#ifdef T_CONST
213
4.72M
        T |= T<<16;
214
4.72M
#endif /* !defined(T_CONST) */
215
4.72M
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
#ifdef S_SKEW
226
    {
227
        int slen, slen2;
228
        int spos = op->s.b.pos;
229
87.0M
        ADJUST_TO_CHUNK(s, spos);
230
        s_skew = spos - dpos;
231
87.0M
        if (s_skew < 0) {
232
65.0M
            s_skew += CHUNKSIZE;
233
65.0M
            s--;
234
65.0M
            skewflags |= 1; /* Suppress reading off left edge */
235
65.0M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + spos.
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+spos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * this is larger, then suppress. */
240
87.0M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
87.0M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
87.0M
        if ((s_skew == 0) || (slen < slen2)) {
243
48.8M
            skewflags |= 4; /* Suppress reading off the right edge */
244
48.8M
        }
245
    }
246
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
0
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
0
        if (t_skew < 0) {
254
0
            t_skew += CHUNKSIZE;
255
0
            t--;
256
0
            skewflags |= 2; /* Suppress reading off left edge */
257
0
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * this is larger, then suppress. */
262
0
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
0
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
0
        if ((t_skew == 0) || (tlen < tlen2)) {
265
0
            skewflags |= 8; /* Suppress reading off the right edge */
266
0
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
87.0M
    len -= CHUNKSIZE; /* len = bits to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
87.0M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
425k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
425k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
425k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
425k
        SPECIFIC_CODE(D, *d, S, T);
278
425k
        *d = (*d & ~lmask) | (D & lmask);
279
425k
        return;
280
425k
    }
281
86.5M
    if ((lmask != CHUNKONES)
282
#if defined(S_SKEW) || defined(T_SKEW)
283
15.5M
        || (skewflags & 3)
284
#endif
285
86.5M
        ) {
286
        /* Unaligned left hand case */
287
70.9M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
70.9M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
70.9M
        SPECIFIC_CODE(D, *d, S, T);
290
70.9M
        *d = (*d & ~lmask) | (D & lmask);
291
70.9M
        d++;
292
70.9M
        len -= CHUNKSIZE;
293
70.9M
    }
294
86.5M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
#ifdef S_SKEW
297
64.8M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
0
            if (t_skew == 0) {
300
0
                do {
301
0
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
0
                    d++;
303
0
                    len -= CHUNKSIZE;
304
0
                } while (len > 0);
305
0
            } else
306
0
#endif /* !defined(T_SKEW) */
307
0
            {
308
546M
                do {
309
546M
                    FETCH_T;
310
546M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
546M
                    d++;
312
546M
                    len -= CHUNKSIZE;
313
546M
                } while (len > 0);
314
0
            }
315
15.8M
        } else
316
48.9M
#endif /* !defined(S_SKEW) */
317
48.9M
        {
318
#ifdef T_SKEW
319
0
            if (t_skew == 0) {
320
0
                do {
321
0
                    FETCH_S;
322
0
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
0
                    d++;
324
0
                    len -= CHUNKSIZE;
325
0
                } while (len > 0);
326
0
            } else
327
0
#endif /* !defined(T_SKEW) */
328
0
            {
329
664M
                do {
330
664M
                    FETCH_S;
331
664M
                    FETCH_T;
332
664M
                    SPECIFIC_CODE(*d, *d, S, T);
333
664M
                    d++;
334
664M
                    len -= CHUNKSIZE;
335
664M
                } while (len > 0);
336
0
            }
337
48.9M
        }
338
64.8M
    }
339
    /* Unaligned right hand case */
340
86.5M
    SAFE_FETCH_S(0,skewflags & 4);
341
86.5M
    SAFE_FETCH_T(0,skewflags & 8);
342
86.5M
    SPECIFIC_CODE(D, *d, S, T);
343
86.5M
    *d = (*d & rmask) | (D & ~rmask);
344
86.5M
}
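
The opening comments of this header (source lines 17-34) describe a classic include-template mechanism: gsroprun.c defines a handful of macros and then includes gsroprun1.h once per variant it wants generated. The fragment below is a hypothetical sketch of one such instantiation; TEMPLATE_NAME, the rop value and the include path are illustrative assumptions, not taken from gsroprun.c.

/* Hypothetical sketch of how gsroprun.c might request one variant.
 * The names and values here are illustrative only. */
#define TEMPLATE_NAME example_copy_rop_run1   /* name of the generated function */
#define SPECIFIC_ROP  0xCC                    /* rop3 "source copy": D = S (assumed value) */
#define SPECIFIC_CODE(OUT_, D_, S_, T_) ((OUT_) = (S_))
#define S_CONST                               /* S is a constant, not a pointer */
#include "gsroprun1.h"                        /* emits example_copy_rop_run1() */

Because the template #undefs all of its parameters at the end (source lines 346-368), the same pattern can be repeated immediately with a different TEMPLATE_NAME.
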
gsroprun.c:notS_rop_run1_const_t
Line
Count
Source
118
214k
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
214k
    CHUNK        lmask, rmask;
124
214k
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
214k
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
214k
    CHUNK        S;
130
214k
    int          s_skew;
131
214k
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
214k
#define T 0
146
214k
#undef T_CONST
147
214k
#endif /* !defined(T_USED) */
148
214k
#if defined(S_SKEW) || defined(T_SKEW)
149
214k
    int skewflags = 0;
150
214k
#endif
151
214k
    CHUNK        D;
152
214k
    int          dpos = op->dpos;
153
214k
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
214k
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
214k
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
214k
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
214k
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
214k
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
#ifdef T_CONST
178
        T |= T<<1;
179
#endif /* !defined(T_CONST) */
180
    }
181
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
#ifdef T_CONST
186
        T |= T<<2;
187
#endif /* !defined(T_CONST) */
188
    }
189
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
#ifdef T_CONST
194
        T |= T<<4;
195
#endif /* !defined(T_CONST) */
196
    }
197
#if CHUNKSIZE > 8
198
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
#ifdef T_CONST
203
        T |= T<<8;
204
#endif /* !defined(T_CONST) */
205
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
#ifdef T_CONST
213
        T |= T<<16;
214
#endif /* !defined(T_CONST) */
215
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
214k
#ifdef S_SKEW
226
214k
    {
227
214k
        int slen, slen2;
228
214k
        int spos = op->s.b.pos;
229
214k
        ADJUST_TO_CHUNK(s, spos);
230
214k
        s_skew = spos - dpos;
231
214k
        if (s_skew < 0) {
232
103k
            s_skew += CHUNKSIZE;
233
103k
            s--;
234
103k
            skewflags |= 1; /* Suppress reading off left edge */
235
103k
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + tpos
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * This is larger, then suppress. */
240
214k
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
214k
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
214k
        if ((s_skew == 0) || (slen < slen2)) {
243
186k
            skewflags |= 4; /* Suppress reading off the right edge */
244
186k
        }
245
214k
    }
246
214k
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * This is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
214k
    len -= CHUNKSIZE; /* len = bytes to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
214k
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
35
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
35
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
35
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
35
        SPECIFIC_CODE(D, *d, S, T);
278
35
        *d = (*d & ~lmask) | (D & lmask);
279
35
        return;
280
35
    }
281
214k
    if ((lmask != CHUNKONES)
282
78.0k
#if defined(S_SKEW) || defined(T_SKEW)
283
78.0k
        || (skewflags & 3)
284
214k
#endif
285
214k
        ) {
286
        /* Unaligned left hand case */
287
136k
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
136k
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
136k
        SPECIFIC_CODE(D, *d, S, T);
290
136k
        *d = (*d & ~lmask) | (D & lmask);
291
136k
        d++;
292
136k
        len -= CHUNKSIZE;
293
136k
    }
294
214k
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
206k
#ifdef S_SKEW
297
206k
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
100k
            {
308
4.09M
                do {
309
4.09M
                    FETCH_T;
310
4.09M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
4.09M
                    d++;
312
4.09M
                    len -= CHUNKSIZE;
313
4.09M
                } while (len > 0);
314
100k
            }
315
100k
        } else
316
105k
#endif /* !defined(S_SKEW) */
317
105k
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
105k
            {
329
1.09M
                do {
330
1.09M
                    FETCH_S;
331
1.09M
                    FETCH_T;
332
1.09M
                    SPECIFIC_CODE(*d, *d, S, T);
333
1.09M
                    d++;
334
1.09M
                    len -= CHUNKSIZE;
335
1.09M
                } while (len > 0);
336
105k
            }
337
105k
        }
338
206k
    }
339
    /* Unaligned right hand case */
340
214k
    SAFE_FETCH_S(0,skewflags & 4);
341
214k
    SAFE_FETCH_T(0,skewflags & 8);
342
214k
    SPECIFIC_CODE(D, *d, S, T);
343
214k
    *d = (*d & rmask) | (D & ~rmask);
344
214k
}
Unexecuted instantiation: gsroprun.c:invert_rop_run1
Unexecuted instantiation: gsroprun.c:xor_rop_run1_const_t
gsroprun.c:sets_rop_run1
Line
Count
Source
118
76.7M
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
76.7M
    CHUNK        lmask, rmask;
124
76.7M
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
76.7M
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
76.7M
    CHUNK        S;
130
76.7M
    int          s_skew;
131
76.7M
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
76.7M
#define T 0
146
76.7M
#undef T_CONST
147
76.7M
#endif /* !defined(T_USED) */
148
76.7M
#if defined(S_SKEW) || defined(T_SKEW)
149
76.7M
    int skewflags = 0;
150
76.7M
#endif
151
76.7M
    CHUNK        D;
152
76.7M
    int          dpos = op->dpos;
153
76.7M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
76.7M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
76.7M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
76.7M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
76.7M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
76.7M
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
#ifdef T_CONST
178
        T |= T<<1;
179
#endif /* !defined(T_CONST) */
180
    }
181
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
#ifdef T_CONST
186
        T |= T<<2;
187
#endif /* !defined(T_CONST) */
188
    }
189
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
#ifdef T_CONST
194
        T |= T<<4;
195
#endif /* !defined(T_CONST) */
196
    }
197
#if CHUNKSIZE > 8
198
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
#ifdef T_CONST
203
        T |= T<<8;
204
#endif /* !defined(T_CONST) */
205
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
#ifdef T_CONST
213
        T |= T<<16;
214
#endif /* !defined(T_CONST) */
215
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
76.7M
#ifdef S_SKEW
226
76.7M
    {
227
76.7M
        int slen, slen2;
228
76.7M
        int spos = op->s.b.pos;
229
76.7M
        ADJUST_TO_CHUNK(s, spos);
230
76.7M
        s_skew = spos - dpos;
231
76.7M
        if (s_skew < 0) {
232
55.6M
            s_skew += CHUNKSIZE;
233
55.6M
            s--;
234
55.6M
            skewflags |= 1; /* Suppress reading off left edge */
235
55.6M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + tpos
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * This is larger, then suppress. */
240
76.7M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
76.7M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
76.7M
        if ((s_skew == 0) || (slen < slen2)) {
243
43.7M
            skewflags |= 4; /* Suppress reading off the right edge */
244
43.7M
        }
245
76.7M
    }
246
76.7M
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * This is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
76.7M
    len -= CHUNKSIZE; /* len = bytes to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
76.7M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
409k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
409k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
409k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
409k
        SPECIFIC_CODE(D, *d, S, T);
278
409k
        *d = (*d & ~lmask) | (D & lmask);
279
409k
        return;
280
409k
    }
281
76.3M
    if ((lmask != CHUNKONES)
282
14.9M
#if defined(S_SKEW) || defined(T_SKEW)
283
14.9M
        || (skewflags & 3)
284
76.3M
#endif
285
76.3M
        ) {
286
        /* Unaligned left hand case */
287
61.4M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
61.4M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
61.4M
        SPECIFIC_CODE(D, *d, S, T);
290
61.4M
        *d = (*d & ~lmask) | (D & lmask);
291
61.4M
        d++;
292
61.4M
        len -= CHUNKSIZE;
293
61.4M
    }
294
76.3M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
58.4M
#ifdef S_SKEW
297
58.4M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
15.3M
            {
308
537M
                do {
309
537M
                    FETCH_T;
310
537M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
537M
                    d++;
312
537M
                    len -= CHUNKSIZE;
313
537M
                } while (len > 0);
314
15.3M
            }
315
15.3M
        } else
316
43.1M
#endif /* !defined(S_SKEW) */
317
43.1M
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
43.1M
            {
329
647M
                do {
330
647M
                    FETCH_S;
331
647M
                    FETCH_T;
332
647M
                    SPECIFIC_CODE(*d, *d, S, T);
333
647M
                    d++;
334
647M
                    len -= CHUNKSIZE;
335
647M
                } while (len > 0);
336
43.1M
            }
337
43.1M
        }
338
58.4M
    }
339
    /* Unaligned right hand case */
340
76.3M
    SAFE_FETCH_S(0,skewflags & 4);
341
76.3M
    SAFE_FETCH_T(0,skewflags & 8);
342
76.3M
    SPECIFIC_CODE(D, *d, S, T);
343
76.3M
    *d = (*d & rmask) | (D & ~rmask);
344
76.3M
}
gsroprun.c:dors_rop_run1_const_t
Line
Count
Source
118
5.28M
{
119
#ifndef SPECIFIC_CODE
120
    rop_proc     proc = rop_proc_table[op->rop];
121
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
#endif /* !defined(SPECIFIC_CODE) */
123
5.28M
    CHUNK        lmask, rmask;
124
5.28M
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
5.28M
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
5.28M
    CHUNK        S;
130
5.28M
    int          s_skew;
131
5.28M
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
#ifdef T_USED
137
#ifdef T_CONST
138
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
5.28M
#define T 0
146
5.28M
#undef T_CONST
147
5.28M
#endif /* !defined(T_USED) */
148
5.28M
#if defined(S_SKEW) || defined(T_SKEW)
149
5.28M
    int skewflags = 0;
150
5.28M
#endif
151
5.28M
    CHUNK        D;
152
5.28M
    int          dpos = op->dpos;
153
5.28M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
5.28M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
5.28M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
5.28M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
5.28M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
5.28M
    if (rmask == CHUNKONES) rmask = 0;
169
170
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
#ifdef T_CONST
178
        T |= T<<1;
179
#endif /* !defined(T_CONST) */
180
    }
181
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
#ifdef T_CONST
186
        T |= T<<2;
187
#endif /* !defined(T_CONST) */
188
    }
189
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
#ifdef T_CONST
194
        T |= T<<4;
195
#endif /* !defined(T_CONST) */
196
    }
197
#if CHUNKSIZE > 8
198
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
#ifdef T_CONST
203
        T |= T<<8;
204
#endif /* !defined(T_CONST) */
205
    }
206
#endif /* CHUNKSIZE > 8 */
207
#if CHUNKSIZE > 16
208
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
#ifdef T_CONST
213
        T |= T<<16;
214
#endif /* !defined(T_CONST) */
215
    }
216
#endif /* CHUNKSIZE > 16 */
217
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
5.28M
#ifdef S_SKEW
226
5.28M
    {
227
5.28M
        int slen, slen2;
228
5.28M
        int spos = op->s.b.pos;
229
5.28M
        ADJUST_TO_CHUNK(s, spos);
230
5.28M
        s_skew = spos - dpos;
231
5.28M
        if (s_skew < 0) {
232
4.99M
            s_skew += CHUNKSIZE;
233
4.99M
            s--;
234
4.99M
            skewflags |= 1; /* Suppress reading off left edge */
235
4.99M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + tpos
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * This is larger, then suppress. */
240
5.28M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
5.28M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
5.28M
        if ((s_skew == 0) || (slen < slen2)) {
243
2.44M
            skewflags |= 4; /* Suppress reading off the right edge */
244
2.44M
        }
245
5.28M
    }
246
5.28M
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * This is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
5.28M
    len -= CHUNKSIZE; /* len = bytes to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
5.28M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
10.4k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
10.4k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
10.4k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
10.4k
        SPECIFIC_CODE(D, *d, S, T);
278
10.4k
        *d = (*d & ~lmask) | (D & lmask);
279
10.4k
        return;
280
10.4k
    }
281
5.27M
    if ((lmask != CHUNKONES)
282
241k
#if defined(S_SKEW) || defined(T_SKEW)
283
241k
        || (skewflags & 3)
284
5.27M
#endif
285
5.27M
        ) {
286
        /* Unaligned left hand case */
287
5.03M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
5.03M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
5.03M
        SPECIFIC_CODE(D, *d, S, T);
290
5.03M
        *d = (*d & ~lmask) | (D & lmask);
291
5.03M
        d++;
292
5.03M
        len -= CHUNKSIZE;
293
5.03M
    }
294
5.27M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
3.15M
#ifdef S_SKEW
297
3.15M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
165k
            {
308
1.21M
                do {
309
1.21M
                    FETCH_T;
310
1.21M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
1.21M
                    d++;
312
1.21M
                    len -= CHUNKSIZE;
313
1.21M
                } while (len > 0);
314
165k
            }
315
165k
        } else
316
2.98M
#endif /* !defined(S_SKEW) */
317
2.98M
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
2.98M
            {
329
7.99M
                do {
330
7.99M
                    FETCH_S;
331
7.99M
                    FETCH_T;
332
7.99M
                    SPECIFIC_CODE(*d, *d, S, T);
333
7.99M
                    d++;
334
7.99M
                    len -= CHUNKSIZE;
335
7.99M
                } while (len > 0);
336
2.98M
            }
337
2.98M
        }
338
3.15M
    }
339
    /* Unaligned right hand case */
340
5.27M
    SAFE_FETCH_S(0,skewflags & 4);
341
5.27M
    SAFE_FETCH_T(0,skewflags & 8);
342
5.27M
    SPECIFIC_CODE(D, *d, S, T);
343
5.27M
    *d = (*d & rmask) | (D & ~rmask);
344
5.27M
}
Unexecuted instantiation: gsroprun.c:generic_rop_run1
gsroprun.c:generic_rop_run1_const_t
Line
Count
Source
118
4.72M
{
119
4.72M
#ifndef SPECIFIC_CODE
120
4.72M
    rop_proc     proc = rop_proc_table[op->rop];
121
4.72M
#define SPECIFIC_CODE(OUT_, D_,S_,T_) OUT_ = proc(D_,S_,T_)
122
4.72M
#endif /* !defined(SPECIFIC_CODE) */
123
4.72M
    CHUNK        lmask, rmask;
124
4.72M
#ifdef S_USED
125
#ifdef S_CONST
126
    CHUNK        S = (CHUNK)op->s.c;
127
#else /* !defined(S_CONST) */
128
4.72M
    const CHUNK *s = (CHUNK *)(void *)op->s.b.ptr;
129
4.72M
    CHUNK        S;
130
4.72M
    int          s_skew;
131
4.72M
#endif /* !defined(S_CONST) */
132
#else /* !defined(S_USED) */
133
#define S 0
134
#undef S_CONST
135
#endif /* !defined(S_USED) */
136
4.72M
#ifdef T_USED
137
4.72M
#ifdef T_CONST
138
4.72M
    CHUNK        T = (CHUNK)op->t.c;
139
#else /* !defined(T_CONST) */
140
    const CHUNK *t = (CHUNK *)(void *)op->t.b.ptr;
141
    CHUNK        T;
142
    int          t_skew;
143
#endif /* !defined(T_CONST) */
144
#else /* !defined(T_USED) */
145
#define T 0
146
#undef T_CONST
147
#endif /* !defined(T_USED) */
148
4.72M
#if defined(S_SKEW) || defined(T_SKEW)
149
4.72M
    int skewflags = 0;
150
4.72M
#endif
151
4.72M
    CHUNK        D;
152
4.72M
    int          dpos = op->dpos;
153
4.72M
    CHUNK       *d = (CHUNK *)(void *)d_;
154
155
    /* Align d to CHUNKSIZE */
156
4.72M
    ADJUST_TO_CHUNK(d,dpos);
157
158
    /* On entry len = length in 'depth' chunks. Change it to be the length
159
     * in bits, and add on the number of bits we skip at the start of the
160
     * run. */
161
4.72M
    len    = len * op->depth + dpos;
162
163
    /* lmask = the set of bits to alter in the output bitmap on the left
164
     * hand edge of the run. rmask = the set of bits NOT to alter in the
165
     * output bitmap on the right hand edge of the run. */
166
4.72M
    lmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & dpos)));
167
4.72M
    rmask  = RE((CHUNKONES>>((CHUNKSIZE-1) & len)));
168
4.72M
    if (rmask == CHUNKONES) rmask = 0;
169
170
4.72M
#if defined(S_CONST) || defined(T_CONST)
171
    /* S and T should be supplied as 'depth' bits. Duplicate them up to be
172
     * byte size (if they are supplied byte sized, that's fine too). */
173
4.72M
    if (op->depth & 1) {
174
#ifdef S_CONST
175
        S |= S<<1;
176
#endif /* !defined(S_CONST) */
177
4.72M
#ifdef T_CONST
178
4.72M
        T |= T<<1;
179
4.72M
#endif /* !defined(T_CONST) */
180
4.72M
    }
181
4.72M
    if (op->depth & 3) {
182
#ifdef S_CONST
183
        S |= S<<2;
184
#endif /* !defined(S_CONST) */
185
4.72M
#ifdef T_CONST
186
4.72M
        T |= T<<2;
187
4.72M
#endif /* !defined(T_CONST) */
188
4.72M
    }
189
4.72M
    if (op->depth & 7) {
190
#ifdef S_CONST
191
        S |= S<<4;
192
#endif /* !defined(S_CONST) */
193
4.72M
#ifdef T_CONST
194
4.72M
        T |= T<<4;
195
4.72M
#endif /* !defined(T_CONST) */
196
4.72M
    }
197
4.72M
#if CHUNKSIZE > 8
198
4.72M
    if (op->depth & 15) {
199
#ifdef S_CONST
200
        S |= S<<8;
201
#endif /* !defined(S_CONST) */
202
4.72M
#ifdef T_CONST
203
4.72M
        T |= T<<8;
204
4.72M
#endif /* !defined(T_CONST) */
205
4.72M
    }
206
4.72M
#endif /* CHUNKSIZE > 8 */
207
4.72M
#if CHUNKSIZE > 16
208
4.72M
    if (op->depth & 31) {
209
#ifdef S_CONST
210
        S |= S<<16;
211
#endif /* !defined(S_CONST) */
212
4.72M
#ifdef T_CONST
213
4.72M
        T |= T<<16;
214
4.72M
#endif /* !defined(T_CONST) */
215
4.72M
    }
216
4.72M
#endif /* CHUNKSIZE > 16 */
217
4.72M
#endif /* defined(S_CONST) || defined(T_CONST) */
218
219
    /* Note #1: This mirrors what the original code did, but I think it has
220
     * the risk of moving s and t back beyond officially allocated space. We
221
     * may be saved by the fact that all blocks have a word or two in front
222
     * of them due to the allocator. If we ever get valgrind properly marking
223
     * allocated blocks as readable etc, then this may throw some spurious
224
     * errors. RJW. */
225
4.72M
#ifdef S_SKEW
226
4.72M
    {
227
4.72M
        int slen, slen2;
228
4.72M
        int spos = op->s.b.pos;
229
4.72M
        ADJUST_TO_CHUNK(s, spos);
230
4.72M
        s_skew = spos - dpos;
231
4.72M
        if (s_skew < 0) {
232
4.28M
            s_skew += CHUNKSIZE;
233
4.28M
            s--;
234
4.28M
            skewflags |= 1; /* Suppress reading off left edge */
235
4.28M
        }
236
        /* We are allowed to read all the data bits, so: len - dpos + tpos
237
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
238
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
239
         * This is larger, then suppress. */
240
4.72M
        slen  = (len + s_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
241
4.72M
        slen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
242
4.72M
        if ((s_skew == 0) || (slen < slen2)) {
243
2.43M
            skewflags |= 4; /* Suppress reading off the right edge */
244
2.43M
        }
245
4.72M
    }
246
4.72M
#endif /* !defined(S_SKEW) */
247
#ifdef T_SKEW
248
    {
249
        int tlen, tlen2;
250
        int tpos = op->t.b.pos;
251
        ADJUST_TO_CHUNK(t, tpos);
252
        t_skew = tpos - dpos;
253
        if (t_skew < 0) {
254
            t_skew += CHUNKSIZE;
255
            t--;
256
            skewflags |= 2; /* Suppress reading off left edge */
257
        }
258
        /* We are allowed to read all the data bits, so: len - dpos + tpos
259
         * We're allowed to read in CHUNKS, so: CHUNKUP(len-dpos+tpos).
260
         * This code will actually read CHUNKUP(len)+CHUNKSIZE bits. If
261
         * This is larger, then suppress. */
262
        tlen  = (len + t_skew    + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
263
        tlen2 = (len + CHUNKSIZE + CHUNKSIZE-1) & ~(CHUNKSIZE-1);
264
        if ((t_skew == 0) || (tlen < tlen2)) {
265
            skewflags |= 8; /* Suppress reading off the right edge */
266
        }
267
    }
268
#endif /* !defined(T_SKEW) */
269
270
4.72M
    len -= CHUNKSIZE; /* len = bytes to do - CHUNKSIZE */
271
    /* len <= 0 means 1 word or less to do */
272
4.72M
    if (len <= 0) {
273
        /* Short case - starts and ends in the same chunk */
274
5.48k
        lmask &= ~rmask; /* Combined mask = bits to alter */
275
5.48k
        SAFE_FETCH_S(skewflags & 1,skewflags & 4);
276
5.48k
        SAFE_FETCH_T(skewflags & 2,skewflags & 8);
277
5.48k
        SPECIFIC_CODE(D, *d, S, T);
278
5.48k
        *d = (*d & ~lmask) | (D & lmask);
279
5.48k
        return;
280
5.48k
    }
281
4.71M
    if ((lmask != CHUNKONES)
282
301k
#if defined(S_SKEW) || defined(T_SKEW)
283
301k
        || (skewflags & 3)
284
4.71M
#endif
285
4.71M
        ) {
286
        /* Unaligned left hand case */
287
4.41M
        SAFE_FETCH_S(skewflags & 1,s_skew == 0);
288
4.41M
        SAFE_FETCH_T(skewflags & 2,t_skew == 0);
289
4.41M
        SPECIFIC_CODE(D, *d, S, T);
290
4.41M
        *d = (*d & ~lmask) | (D & lmask);
291
4.41M
        d++;
292
4.41M
        len -= CHUNKSIZE;
293
4.41M
    }
294
4.71M
    if (len > 0) {
295
        /* Simple middle case (complete destination chunks). */
296
3.01M
#ifdef S_SKEW
297
3.01M
        if (s_skew == 0) {
298
#ifdef T_SKEW
299
            if (t_skew == 0) {
300
                do {
301
                    SPECIFIC_CODE(*d, *d, *s++, *t++);
302
                    d++;
303
                    len -= CHUNKSIZE;
304
                } while (len > 0);
305
            } else
306
#endif /* !defined(T_SKEW) */
307
317k
            {
308
4.30M
                do {
309
4.30M
                    FETCH_T;
310
4.30M
                    SPECIFIC_CODE(*d, *d, *s++, T);
311
4.30M
                    d++;
312
4.30M
                    len -= CHUNKSIZE;
313
4.30M
                } while (len > 0);
314
317k
            }
315
317k
        } else
316
2.69M
#endif /* !defined(S_SKEW) */
317
2.69M
        {
318
#ifdef T_SKEW
319
            if (t_skew == 0) {
320
                do {
321
                    FETCH_S;
322
                    SPECIFIC_CODE(*d, *d, S, *t++);
323
                    d++;
324
                    len -= CHUNKSIZE;
325
                } while (len > 0);
326
            } else
327
#endif /* !defined(T_SKEW) */
328
2.69M
            {
329
8.76M
                do {
330
8.76M
                    FETCH_S;
331
8.76M
                    FETCH_T;
332
8.76M
                    SPECIFIC_CODE(*d, *d, S, T);
333
8.76M
                    d++;
334
8.76M
                    len -= CHUNKSIZE;
335
8.76M
                } while (len > 0);
336
2.69M
            }
337
2.69M
        }
338
3.01M
    }
339
    /* Unaligned right hand case */
340
4.71M
    SAFE_FETCH_S(0,skewflags & 4);
341
4.71M
    SAFE_FETCH_T(0,skewflags & 8);
342
4.71M
    SPECIFIC_CODE(D, *d, S, T);
343
4.71M
    *d = (*d & rmask) | (D & ~rmask);
344
4.71M
}
Unexecuted instantiation: gsroprun.c:generic_rop_run1_const_st
345
346
#undef ADJUST_TO_CHUNK
347
#undef CHUNKSIZE
348
#undef CHUNK
349
#undef CHUNKONES
350
#undef FETCH_S
351
#undef FETCH_T
352
#undef SAFE_FETCH_S
353
#undef SAFE_FETCH_T
354
#undef RE
355
#undef S
356
#undef S_USED
357
#undef S_CONST
358
#undef S_SKEW
359
#undef SKEW_FETCH
360
#undef SAFE_SKEW_FETCH
361
#undef SPECIFIC_CODE
362
#undef SPECIFIC_ROP
363
#undef T
364
#undef T_USED
365
#undef T_CONST
366
#undef T_SKEW
367
#undef TEMPLATE_NAME
368
#undef ROP_PTRDIFF_T
369
370
#else
371
int dummy;
372
#endif
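
Two ideas carry most of the weight in the run loop above: SKEW_FETCH (source lines 90-97) assembles one destination-aligned chunk from two adjacent source chunks when source and destination are not bit-aligned, and the lmask/rmask pair (source lines 163-168) restricts writes to the bits actually covered by the run at its left and right edges. The self-contained sketch below replays both ideas for a 32-bit chunk with RE() as the identity (the big-endian layout); the variable values are made up for illustration and the helper names are local to this example.

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

typedef uint32_t CHUNK;
#define CHUNKSIZE 32
#define CHUNKONES 0xFFFFFFFFu

/* Mirror of SKEW_FETCH with RE() as the identity: build one destination-
 * aligned chunk from two adjacent source chunks, skew known to be non-zero. */
static CHUNK skew_fetch(const CHUNK *s, int skew)
{
    return (CHUNK)((s[0] << skew) | (s[1] >> (CHUNKSIZE - skew)));
}

int main(void)
{
    /* With a skew of 12 bits the fetched chunk takes the low 20 bits of
     * s[0] followed by the high 12 bits of s[1]. */
    CHUNK src[2]  = { 0x12345678u, 0x9ABCDEF0u };
    CHUNK fetched = skew_fetch(src, 12);           /* 0x456789AB */

    /* Edge masks as built in the template: lmask = bits to alter in the
     * first chunk of the run, rmask = bits NOT to alter in the last one.
     * dpos and len are hypothetical example values; len already includes
     * the dpos bits skipped at the start, as in the template. */
    int   dpos  = 5;
    int   len   = 83;
    CHUNK lmask = CHUNKONES >> ((CHUNKSIZE - 1) & dpos);
    CHUNK rmask = CHUNKONES >> ((CHUNKSIZE - 1) & len);
    if (rmask == CHUNKONES)
        rmask = 0;

    /* Left-edge merge, mirroring how the template writes its first chunk:
     * keep the destination bits outside lmask, take the rop result inside. */
    CHUNK dst    = 0xAAAAAAAAu;    /* existing destination chunk          */
    CHUNK ropped = fetched;        /* pretend the rop produced 'fetched'  */
    CHUNK merged = (dst & ~lmask) | (ropped & lmask);

    printf("fetched = %08" PRIX32 "\n", fetched);
    printf("lmask   = %08" PRIX32 "  rmask = %08" PRIX32 "\n", lmask, rmask);
    printf("merged  = %08" PRIX32 "\n", merged);
    return 0;
}

On a little-endian machine with a cheap ENDIAN_SWAP_INT, the template wraps each load and store in RE() so the same left-to-right bit arithmetic still applies; that case is omitted here for brevity.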