Coverage Report

Created: 2025-10-13 07:03

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/pcre2/deps/sljit/sljit_src/sljitNativeX86_64.c
Line
Count
Source
1
/*
2
 *    Stack-less Just-In-Time compiler
3
 *
4
 *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5
 *
6
 * Redistribution and use in source and binary forms, with or without modification, are
7
 * permitted provided that the following conditions are met:
8
 *
9
 *   1. Redistributions of source code must retain the above copyright notice, this list of
10
 *      conditions and the following disclaimer.
11
 *
12
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
13
 *      of conditions and the following disclaimer in the documentation and/or other materials
14
 *      provided with the distribution.
15
 *
16
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25
 */
26
27
/* x86 64-bit arch dependent functions. */
28
29
/* --------------------------------------------------------------------- */
30
/*  Operators                                                            */
31
/* --------------------------------------------------------------------- */
32
33
static sljit_s32 emit_load_imm64(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)
34
243M
{
35
243M
  sljit_u8 *inst;
36
37
243M
  inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw));
38
243M
  FAIL_IF(!inst);
39
243M
  INC_SIZE(2 + sizeof(sljit_sw));
40
243M
  inst[0] = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B);
41
243M
  inst[1] = U8(MOV_r_i32 | reg_lmap[reg]);
42
243M
  sljit_unaligned_store_sw(inst + 2, imm);
43
243M
  return SLJIT_SUCCESS;
44
243M
}
45
46
static sljit_s32 emit_do_imm32(struct sljit_compiler *compiler, sljit_u8 rex, sljit_u8 opcode, sljit_sw imm)
47
1.47G
{
48
1.47G
  sljit_u8 *inst;
49
1.47G
  sljit_uw length = (rex ? 2 : 1) + sizeof(sljit_s32);
50
51
1.47G
  inst = (sljit_u8*)ensure_buf(compiler, 1 + length);
52
1.47G
  FAIL_IF(!inst);
53
1.47G
  INC_SIZE(length);
54
1.47G
  if (rex)
55
1.47G
    *inst++ = rex;
56
1.47G
  *inst++ = opcode;
57
1.47G
  sljit_unaligned_store_s32(inst, (sljit_s32)imm);
58
1.47G
  return SLJIT_SUCCESS;
59
1.47G
}
60
61
static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw size,
62
  /* The register or immediate operand. */
63
  sljit_s32 a, sljit_sw imma,
64
  /* The general operand (not immediate). */
65
  sljit_s32 b, sljit_sw immb)
66
8.14G
{
67
8.14G
  sljit_u8 *inst;
68
8.14G
  sljit_u8 *buf_ptr;
69
8.14G
  sljit_u8 rex = 0;
70
8.14G
  sljit_u8 reg_lmap_b;
71
8.14G
  sljit_uw flags = size;
72
8.14G
  sljit_uw inst_size;
73
74
  /* The immediate operand must be 32 bit. */
75
8.14G
  SLJIT_ASSERT(a != SLJIT_IMM || compiler->mode32 || IS_HALFWORD(imma));
76
  /* Both cannot be switched on. */
77
8.14G
  SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
78
  /* Size flags not allowed for typed instructions. */
79
8.14G
  SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
80
  /* Both size flags cannot be switched on. */
81
8.14G
  SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
82
  /* SSE2 and immediate is not possible. */
83
8.14G
  SLJIT_ASSERT(a != SLJIT_IMM || !(flags & EX86_SSE2));
84
8.14G
  SLJIT_ASSERT(((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
85
8.14G
      & ((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);
86
8.14G
  SLJIT_ASSERT((flags & (EX86_VEX_EXT | EX86_REX)) != EX86_VEX_EXT);
87
88
8.14G
  size &= 0xf;
89
  /* The mod r/m byte is always present. */
90
8.14G
  inst_size = size + 1;
91
92
8.14G
  if (!compiler->mode32 && !(flags & EX86_NO_REXW))
93
7.45G
    rex |= REX_W;
94
697M
  else if (flags & EX86_REX)
95
0
    rex |= REX;
96
97
8.14G
  if (flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
98
3.40M
    inst_size++;
99
100
  /* Calculate size of b. */
101
8.14G
  if (b & SLJIT_MEM) {
102
3.92G
    if (!(b & OFFS_REG_MASK) && NOT_HALFWORD(immb)) {
103
37.6M
      PTR_FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immb));
104
37.6M
      immb = 0;
105
37.6M
      if (b & REG_MASK)
106
37.6M
        b |= TO_OFFS_REG(TMP_REG2);
107
0
      else
108
0
        b |= TMP_REG2;
109
37.6M
    }
110
111
3.92G
    if (!(b & REG_MASK))
112
0
      inst_size += 1 + sizeof(sljit_s32); /* SIB byte required to avoid RIP based addressing. */
113
3.92G
    else {
114
3.92G
      if (immb != 0 && !(b & OFFS_REG_MASK)) {
115
        /* Immediate operand. */
116
3.42G
        if (immb <= 127 && immb >= -128)
117
2.66G
          inst_size += sizeof(sljit_s8);
118
754M
        else
119
754M
          inst_size += sizeof(sljit_s32);
120
3.42G
      } else if (reg_lmap[b & REG_MASK] == 5) {
121
        /* Swap registers if possible. */
122
21.7k
        if ((b & OFFS_REG_MASK) && (immb & 0x3) == 0 && reg_lmap[OFFS_REG(b)] != 5)
123
0
          b = SLJIT_MEM | OFFS_REG(b) | TO_OFFS_REG(b & REG_MASK);
124
21.7k
        else
125
21.7k
          inst_size += sizeof(sljit_s8);
126
21.7k
      }
127
128
3.92G
      if (reg_map[b & REG_MASK] >= 8)
129
1.63G
        rex |= REX_B;
130
131
3.92G
      if (reg_lmap[b & REG_MASK] == 4 && !(b & OFFS_REG_MASK))
132
1.25G
        b |= TO_OFFS_REG(SLJIT_SP);
133
134
3.92G
      if (b & OFFS_REG_MASK) {
135
1.30G
        inst_size += 1; /* SIB byte. */
136
1.30G
        if (reg_map[OFFS_REG(b)] >= 8)
137
37.6M
          rex |= REX_X;
138
1.30G
      }
139
3.92G
    }
140
4.22G
  } else if (!(flags & EX86_SSE2_OP2)) {
141
4.22G
    if (reg_map[b] >= 8)
142
971M
      rex |= REX_B;
143
4.22G
  } else if (freg_map[b] >= 8)
144
0
    rex |= REX_B;
145
146
8.14G
  if ((flags & EX86_VEX_EXT) && (rex & 0x3)) {
147
0
    SLJIT_ASSERT(size == 2);
148
0
    size++;
149
0
    inst_size++;
150
0
  }
151
152
8.14G
  if (a == SLJIT_IMM) {
153
2.94G
    if (flags & EX86_BIN_INS) {
154
1.86G
      if (imma <= 127 && imma >= -128) {
155
1.53G
        inst_size += sizeof(sljit_s8);
156
1.53G
        flags |= EX86_BYTE_ARG;
157
1.53G
      } else
158
332M
        inst_size += sizeof(sljit_s32);
159
1.86G
    } else if (flags & EX86_SHIFT_INS) {
160
60.6M
      SLJIT_ASSERT(imma <= (compiler->mode32 ? 0x1f : 0x3f));
161
60.6M
      if (imma != 1) {
162
44.6M
        inst_size += sizeof(sljit_s8);
163
44.6M
        flags |= EX86_BYTE_ARG;
164
44.6M
      }
165
1.01G
    } else if (flags & EX86_BYTE_ARG)
166
0
      inst_size += sizeof(sljit_s8);
167
1.01G
    else if (flags & EX86_HALF_ARG)
168
0
      inst_size += sizeof(sljit_s16);
169
1.01G
    else
170
1.01G
      inst_size += sizeof(sljit_s32);
171
5.20G
  } else {
172
5.20G
    SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);
173
    /* reg_map[SLJIT_PREF_SHIFT_REG] is less than 8. */
174
5.20G
    if (!(flags & EX86_SSE2_OP1)) {
175
5.20G
      if (reg_map[a] >= 8)
176
756M
        rex |= REX_R;
177
5.20G
    }
178
2.48M
    else if (freg_map[a] >= 8)
179
0
      rex |= REX_R;
180
5.20G
  }
181
182
8.14G
  if (rex)
183
7.74G
    inst_size++;
184
185
8.14G
  inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
186
8.14G
  PTR_FAIL_IF(!inst);
187
188
  /* Encoding prefixes. */
189
8.14G
  INC_SIZE(inst_size);
190
8.14G
  if (flags & EX86_PREF_F2)
191
0
    *inst++ = 0xf2;
192
8.14G
  else if (flags & EX86_PREF_F3)
193
192k
    *inst++ = 0xf3;
194
8.14G
  else if (flags & EX86_PREF_66)
195
3.21M
    *inst++ = 0x66;
196
197
  /* Rex is always the last prefix. */
198
8.14G
  if (rex)
199
7.74G
    *inst++ = rex;
200
201
8.14G
  buf_ptr = inst + size;
202
203
  /* Encode mod/rm byte. */
204
8.14G
  if (!(flags & EX86_SHIFT_INS)) {
205
8.07G
    if ((flags & EX86_BIN_INS) && a == SLJIT_IMM)
206
1.86G
      *inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;
207
208
8.07G
    if (a == SLJIT_IMM)
209
2.88G
      *buf_ptr = 0;
210
5.18G
    else if (!(flags & EX86_SSE2_OP1))
211
5.18G
      *buf_ptr = U8(reg_lmap[a] << 3);
212
2.48M
    else
213
2.48M
      *buf_ptr = U8(freg_lmap[a] << 3);
214
8.07G
  } else {
215
74.7M
    if (a == SLJIT_IMM) {
216
60.6M
      if (imma == 1)
217
16.0M
        *inst = GROUP_SHIFT_1;
218
44.6M
      else
219
44.6M
        *inst = GROUP_SHIFT_N;
220
60.6M
    } else
221
14.1M
      *inst = GROUP_SHIFT_CL;
222
74.7M
    *buf_ptr = 0;
223
74.7M
  }
224
225
8.14G
  if (!(b & SLJIT_MEM)) {
226
4.22G
    *buf_ptr = U8(*buf_ptr | MOD_REG | (!(flags & EX86_SSE2_OP2) ? reg_lmap[b] : freg_lmap[b]));
227
4.22G
    buf_ptr++;
228
4.22G
  } else if (b & REG_MASK) {
229
3.92G
    reg_lmap_b = reg_lmap[b & REG_MASK];
230
231
3.92G
    if (!(b & OFFS_REG_MASK) || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP)) {
232
3.87G
      if (immb != 0 || reg_lmap_b == 5) {
233
3.42G
        if (immb <= 127 && immb >= -128)
234
2.66G
          *buf_ptr |= 0x40;
235
754M
        else
236
754M
          *buf_ptr |= 0x80;
237
3.42G
      }
238
239
3.87G
      if (!(b & OFFS_REG_MASK))
240
2.62G
        *buf_ptr++ |= reg_lmap_b;
241
1.25G
      else {
242
1.25G
        buf_ptr[0] |= 0x04;
243
1.25G
        buf_ptr[1] = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3));
244
1.25G
        buf_ptr += 2;
245
1.25G
      }
246
247
3.87G
      if (immb != 0 || reg_lmap_b == 5) {
248
3.42G
        if (immb <= 127 && immb >= -128)
249
2.66G
          *buf_ptr++ = U8(immb); /* 8 bit displacement. */
250
754M
        else {
251
754M
          sljit_unaligned_store_s32(buf_ptr, (sljit_s32)immb); /* 32 bit displacement. */
252
754M
          buf_ptr += sizeof(sljit_s32);
253
754M
        }
254
3.42G
      }
255
3.87G
    } else {
256
52.3M
      if (reg_lmap_b == 5)
257
0
        *buf_ptr |= 0x40;
258
259
52.3M
      buf_ptr[0] |= 0x04;
260
52.3M
      buf_ptr[1] = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3) | (immb << 6));
261
52.3M
      buf_ptr += 2;
262
263
52.3M
      if (reg_lmap_b == 5)
264
0
        *buf_ptr++ = 0;
265
52.3M
    }
266
3.92G
  } else {
267
0
    buf_ptr[0] |= 0x04;
268
0
    buf_ptr[1] = 0x25;
269
0
    buf_ptr += 2;
270
0
    sljit_unaligned_store_s32(buf_ptr, (sljit_s32)immb); /* 32 bit displacement. */
271
0
    buf_ptr += sizeof(sljit_s32);
272
0
  }
273
274
8.14G
  if (a == SLJIT_IMM) {
275
2.94G
    if (flags & EX86_BYTE_ARG)
276
1.57G
      *buf_ptr = U8(imma);
277
1.36G
    else if (flags & EX86_HALF_ARG)
278
0
      sljit_unaligned_store_s16(buf_ptr, (sljit_s16)imma);
279
1.36G
    else if (!(flags & EX86_SHIFT_INS))
280
1.35G
      sljit_unaligned_store_s32(buf_ptr, (sljit_s32)imma);
281
2.94G
  }
282
283
8.14G
  return inst;
284
8.14G
}
285
286
static sljit_s32 emit_vex_instruction(struct sljit_compiler *compiler, sljit_uw op,
287
  /* The first and second register operand. */
288
  sljit_s32 a, sljit_s32 v,
289
  /* The general operand (not immediate). */
290
  sljit_s32 b, sljit_sw immb)
291
0
{
292
0
  sljit_u8 *inst;
293
0
  sljit_u8 vex = 0;
294
0
  sljit_u8 vex_m = 0;
295
0
  sljit_uw size;
296
297
0
  SLJIT_ASSERT(((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
298
0
      & ((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);
299
300
0
  op |= EX86_REX;
301
302
0
  if (op & VEX_OP_0F38)
303
0
    vex_m = 0x2;
304
0
  else if (op & VEX_OP_0F3A)
305
0
    vex_m = 0x3;
306
307
0
  if ((op & VEX_W) || ((op & VEX_AUTO_W) && !compiler->mode32)) {
308
0
    if (vex_m == 0)
309
0
      vex_m = 0x1;
310
311
0
    vex |= 0x80;
312
0
  }
313
314
0
  if (op & EX86_PREF_66)
315
0
    vex |= 0x1;
316
0
  else if (op & EX86_PREF_F2)
317
0
    vex |= 0x3;
318
0
  else if (op & EX86_PREF_F3)
319
0
    vex |= 0x2;
320
321
0
  op &= ~(EX86_PREF_66 | EX86_PREF_F2 | EX86_PREF_F3);
322
323
0
  if (op & VEX_256)
324
0
    vex |= 0x4;
325
326
0
  vex = U8(vex | ((((op & VEX_SSE2_OPV) ? freg_map[v] : reg_map[v]) ^ 0xf) << 3));
327
328
0
  size = op & ~(sljit_uw)0xff;
329
0
  size |= (vex_m == 0) ? (EX86_VEX_EXT | 2) : 3;
330
331
0
  inst = emit_x86_instruction(compiler, size, a, 0, b, immb);
332
0
  FAIL_IF(!inst);
333
334
0
  SLJIT_ASSERT((inst[-1] & 0xf0) == REX);
335
336
  /* If X or B is present in REX prefix. */
337
0
  if (vex_m == 0 && inst[-1] & 0x3)
338
0
    vex_m = 0x1;
339
340
0
  if (vex_m == 0) {
341
0
    vex |= U8(((inst[-1] >> 2) ^ 0x1) << 7);
342
343
0
    inst[-1] = 0xc5;
344
0
    inst[0] = vex;
345
0
    inst[1] = U8(op);
346
0
    return SLJIT_SUCCESS;
347
0
  }
348
349
0
  vex_m |= U8((inst[-1] ^ 0x7) << 5);
350
0
  inst[-1] = 0xc4;
351
0
  inst[0] = vex_m;
352
0
  inst[1] = vex;
353
0
  inst[2] = U8(op);
354
0
  return SLJIT_SUCCESS;
355
0
}
356
357
/* --------------------------------------------------------------------- */
358
/*  Enter / return                                                       */
359
/* --------------------------------------------------------------------- */
360
361
static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr)
362
108M
{
363
108M
  sljit_uw type = jump->flags >> TYPE_SHIFT;
364
365
108M
  int short_addr = ((jump->flags & (SLJIT_REWRITABLE_JUMP | JUMP_ADDR)) == JUMP_ADDR) && (jump->u.target <= 0xffffffff);
366
367
  /* The relative jump below specialized for this case. */
368
108M
  SLJIT_ASSERT(reg_map[TMP_REG2] >= 8 && TMP_REG2 != SLJIT_TMP_DEST_REG);
369
370
108M
  if (type < SLJIT_JUMP) {
371
    /* Invert type. */
372
0
    code_ptr[0] = U8(get_jump_code(type ^ 0x1) - 0x10);
373
0
    code_ptr[1] = short_addr ? (6 + 3) : (10 + 3);
374
0
    code_ptr += 2;
375
0
  }
376
377
108M
  code_ptr[0] = short_addr ? REX_B : (REX_W | REX_B);
378
108M
  code_ptr[1] = MOV_r_i32 | reg_lmap[TMP_REG2];
379
108M
  code_ptr += 2;
380
108M
  jump->addr = (sljit_uw)code_ptr;
381
382
108M
  if (!(jump->flags & JUMP_ADDR))
383
0
    jump->flags |= PATCH_MD;
384
108M
  else if (short_addr)
385
0
    sljit_unaligned_store_s32(code_ptr, (sljit_s32)jump->u.target);
386
108M
  else
387
108M
    sljit_unaligned_store_sw(code_ptr, (sljit_sw)jump->u.target);
388
389
108M
  code_ptr += short_addr ? sizeof(sljit_s32) : sizeof(sljit_sw);
390
391
108M
  code_ptr[0] = REX_B;
392
108M
  code_ptr[1] = GROUP_FF;
393
108M
  code_ptr[2] = U8(MOD_REG | (type >= SLJIT_FAST_CALL ? CALL_rm : JMP_rm) | reg_lmap[TMP_REG2]);
394
395
108M
  return code_ptr + 3;
396
108M
}
397
398
static sljit_u8* generate_mov_addr_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
399
7.55M
{
400
7.55M
  sljit_uw addr;
401
7.55M
  sljit_sw diff;
402
7.55M
  SLJIT_UNUSED_ARG(executable_offset);
403
404
7.55M
  SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) <= 10);
405
7.55M
  if (jump->flags & JUMP_ADDR)
406
0
    addr = jump->u.target;
407
7.55M
  else
408
7.55M
    addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + jump->u.label->size;
409
410
7.55M
  if (addr > 0xffffffffl) {
411
7.55M
    diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
412
413
7.55M
    if (diff <= HALFWORD_MAX && diff >= HALFWORD_MIN) {
414
7.55M
      SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 7);
415
7.55M
      code_ptr -= SSIZE_OF(s32) - 1;
416
417
7.55M
      SLJIT_ASSERT((code_ptr[-3 - SSIZE_OF(s32)] & 0xf8) == REX_W);
418
7.55M
      SLJIT_ASSERT((code_ptr[-2 - SSIZE_OF(s32)] & 0xf8) == MOV_r_i32);
419
420
7.55M
      code_ptr[-3 - SSIZE_OF(s32)] = U8(REX_W | ((code_ptr[-3 - SSIZE_OF(s32)] & 0x1) << 2));
421
7.55M
      code_ptr[-1 - SSIZE_OF(s32)] = U8(((code_ptr[-2 - SSIZE_OF(s32)] & 0x7) << 3) | 0x5);
422
7.55M
      code_ptr[-2 - SSIZE_OF(s32)] = LEA_r_m;
423
424
7.55M
      jump->flags |= PATCH_MW;
425
7.55M
      return code_ptr;
426
7.55M
    }
427
428
0
    jump->flags |= PATCH_MD;
429
0
    return code_ptr;
430
7.55M
  }
431
432
0
  code_ptr -= 2 + sizeof(sljit_uw);
433
434
0
  SLJIT_ASSERT((code_ptr[0] & 0xf8) == REX_W);
435
0
  SLJIT_ASSERT((code_ptr[1] & 0xf8) == MOV_r_i32);
436
437
0
  if ((code_ptr[0] & 0x07) != 0) {
438
0
    SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 6);
439
0
    code_ptr[0] = U8(code_ptr[0] & ~0x08);
440
0
    code_ptr += 2 + sizeof(sljit_s32);
441
0
  } else {
442
0
    SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 5);
443
0
    code_ptr[0] = code_ptr[1];
444
0
    code_ptr += 1 + sizeof(sljit_s32);
445
0
  }
446
447
0
  return code_ptr;
448
7.55M
}
449
450
#ifdef _WIN64
451
typedef struct {
452
  sljit_sw regs[2];
453
} sljit_sse2_reg;
454
#endif /* _WIN64 */
455
456
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
457
  sljit_s32 options, sljit_s32 arg_types,
458
  sljit_s32 scratches, sljit_s32 saveds, sljit_s32 local_size)
459
580k
{
460
580k
  sljit_uw size;
461
580k
  sljit_s32 word_arg_count = 0;
462
580k
  sljit_s32 saved_arg_count = SLJIT_KEPT_SAVEDS_COUNT(options);
463
580k
  sljit_s32 saved_regs_size, tmp, i;
464
#ifdef _WIN64
465
  sljit_s32 fscratches;
466
  sljit_s32 fsaveds;
467
  sljit_s32 saved_float_regs_size;
468
  sljit_s32 saved_float_regs_offset = 0;
469
  sljit_s32 float_arg_count = 0;
470
#endif /* _WIN64 */
471
580k
  sljit_u8 *inst;
472
473
580k
  CHECK_ERROR();
474
580k
  CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, local_size));
475
580k
  set_emit_enter(compiler, options, arg_types, scratches, saveds, local_size);
476
477
580k
  scratches = ENTER_GET_REGS(scratches);
478
#ifdef _WIN64
479
  saveds = ENTER_GET_REGS(saveds);
480
  fscratches = compiler->fscratches;
481
  fsaveds = compiler->fsaveds;
482
#endif /* _WIN64 */
483
484
580k
  if (options & SLJIT_ENTER_REG_ARG)
485
0
    arg_types = 0;
486
487
  /* Emit ENDBR64 at function entry if needed.  */
488
580k
  FAIL_IF(emit_endbranch(compiler));
489
490
580k
  compiler->mode32 = 0;
491
492
  /* Including the return address saved by the call instruction. */
493
580k
  saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 1);
494
495
580k
  tmp = SLJIT_S0 - saveds;
496
3.48M
  for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--) {
497
2.90M
    size = reg_map[i] >= 8 ? 2 : 1;
498
2.90M
    inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
499
2.90M
    FAIL_IF(!inst);
500
2.90M
    INC_SIZE(size);
501
2.90M
    if (reg_map[i] >= 8)
502
1.74M
      *inst++ = REX_B;
503
2.90M
    PUSH_REG(reg_lmap[i]);
504
2.90M
  }
505
506
580k
  for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
507
0
    size = reg_map[i] >= 8 ? 2 : 1;
508
0
    inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
509
0
    FAIL_IF(!inst);
510
0
    INC_SIZE(size);
511
0
    if (reg_map[i] >= 8)
512
0
      *inst++ = REX_B;
513
0
    PUSH_REG(reg_lmap[i]);
514
0
  }
515
516
#ifdef _WIN64
517
  local_size += SLJIT_LOCALS_OFFSET;
518
  saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);
519
520
  if (saved_float_regs_size > 0) {
521
    saved_float_regs_offset = ((local_size + 0xf) & ~0xf);
522
    local_size = saved_float_regs_offset + saved_float_regs_size;
523
  }
524
#else /* !_WIN64 */
525
580k
  SLJIT_ASSERT(SLJIT_LOCALS_OFFSET == 0);
526
580k
#endif /* _WIN64 */
527
528
580k
  arg_types >>= SLJIT_ARG_SHIFT;
529
530
1.16M
  while (arg_types > 0) {
531
580k
    if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64) {
532
580k
      tmp = 0;
533
580k
#ifndef _WIN64
534
580k
      switch (word_arg_count) {
535
580k
      case 0:
536
580k
        tmp = SLJIT_R2;
537
580k
        break;
538
0
      case 1:
539
0
        tmp = SLJIT_R1;
540
0
        break;
541
0
      case 2:
542
0
        tmp = TMP_REG1;
543
0
        break;
544
0
      default:
545
0
        tmp = SLJIT_R3;
546
0
        break;
547
580k
      }
548
#else /* !_WIN64 */
549
      switch (word_arg_count + float_arg_count) {
550
      case 0:
551
        tmp = SLJIT_R3;
552
        break;
553
      case 1:
554
        tmp = SLJIT_R1;
555
        break;
556
      case 2:
557
        tmp = SLJIT_R2;
558
        break;
559
      default:
560
        tmp = TMP_REG1;
561
        break;
562
      }
563
#endif /* _WIN64 */
564
580k
      if (arg_types & SLJIT_ARG_TYPE_SCRATCH_REG) {
565
0
        if (tmp != SLJIT_R0 + word_arg_count)
566
0
          EMIT_MOV(compiler, SLJIT_R0 + word_arg_count, 0, tmp, 0);
567
580k
      } else {
568
580k
        EMIT_MOV(compiler, SLJIT_S0 - saved_arg_count, 0, tmp, 0);
569
580k
        saved_arg_count++;
570
580k
      }
571
580k
      word_arg_count++;
572
580k
    } else {
573
#ifdef _WIN64
574
      SLJIT_COMPILE_ASSERT(SLJIT_FR0 == 1, float_register_index_start);
575
      float_arg_count++;
576
      if (float_arg_count != float_arg_count + word_arg_count)
577
        FAIL_IF(emit_sse2_load(compiler, (arg_types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F32,
578
          float_arg_count, float_arg_count + word_arg_count, 0));
579
#endif /* _WIN64 */
580
0
    }
581
580k
    arg_types >>= SLJIT_ARG_SHIFT;
582
580k
  }
583
584
580k
  local_size = ((local_size + saved_regs_size + 0xf) & ~0xf) - saved_regs_size;
585
580k
  compiler->local_size = local_size;
586
587
#ifdef _WIN64
588
  if (local_size > 0) {
589
    if (local_size <= 4 * 4096) {
590
      if (local_size > 4096)
591
        EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096);
592
      if (local_size > 2 * 4096)
593
        EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 2);
594
      if (local_size > 3 * 4096)
595
        EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 3);
596
    }
597
    else {
598
      EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, local_size >> 12);
599
600
      EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_MEM1(SLJIT_SP), -4096);
601
      BINARY_IMM32(SUB, 4096, SLJIT_SP, 0);
602
      BINARY_IMM32(SUB, 1, TMP_REG1, 0);
603
604
      inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
605
      FAIL_IF(!inst);
606
607
      INC_SIZE(2);
608
      inst[0] = JNE_i8;
609
      inst[1] = (sljit_u8)-21;
610
      local_size &= 0xfff;
611
    }
612
613
    if (local_size > 0)
614
      EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -local_size);
615
  }
616
#endif /* _WIN64 */
617
618
580k
  if (local_size > 0)
619
580k
    BINARY_IMM32(SUB, local_size, SLJIT_SP, 0);
620
621
#ifdef _WIN64
622
  if (saved_float_regs_size > 0) {
623
    compiler->mode32 = 1;
624
625
    tmp = SLJIT_FS0 - fsaveds;
626
    for (i = SLJIT_FS0; i > tmp; i--) {
627
      FAIL_IF(emit_groupf(compiler, MOVAPS_xm_x | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
628
      saved_float_regs_offset += 16;
629
    }
630
631
    for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
632
      FAIL_IF(emit_groupf(compiler, MOVAPS_xm_x | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
633
      saved_float_regs_offset += 16;
634
    }
635
  }
636
#endif /* _WIN64 */
637
638
580k
  return SLJIT_SUCCESS;
639
580k
}
640
641
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
642
  sljit_s32 options, sljit_s32 arg_types,
643
  sljit_s32 scratches, sljit_s32 saveds, sljit_s32 local_size)
644
0
{
645
0
  sljit_s32 saved_regs_size;
646
0
#ifdef _WIN64
647
0
  sljit_s32 fscratches;
648
0
  sljit_s32 fsaveds;
649
0
  sljit_s32 saved_float_regs_size;
650
0
#endif /* _WIN64 */
651
0
652
0
  CHECK_ERROR();
653
0
  CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, local_size));
654
0
  set_emit_enter(compiler, options, arg_types, scratches, saveds, local_size);
655
0
656
0
  scratches = ENTER_GET_REGS(scratches);
657
0
658
0
#ifdef _WIN64
659
0
  saveds = ENTER_GET_REGS(saveds);
660
0
  fscratches = compiler->fscratches;
661
0
  fsaveds = compiler->fsaveds;
662
0
663
0
  local_size += SLJIT_LOCALS_OFFSET;
664
0
  saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);
665
0
666
0
  if (saved_float_regs_size > 0)
667
0
    local_size = ((local_size + 0xf) & ~0xf) + saved_float_regs_size;
668
0
#else /* !_WIN64 */
669
0
  SLJIT_ASSERT(SLJIT_LOCALS_OFFSET == 0);
670
0
#endif /* _WIN64 */
671
0
672
0
  /* Including the return address saved by the call instruction. */
673
0
  saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
674
0
  compiler->local_size = ((local_size + saved_regs_size + 0xf) & ~0xf) - saved_regs_size;
675
0
  return SLJIT_SUCCESS;
676
0
}
677
678
static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 is_return_to)
679
580k
{
680
580k
  sljit_uw size;
681
580k
  sljit_s32 local_size, i, tmp;
682
580k
  sljit_u8 *inst;
683
#ifdef _WIN64
684
  sljit_s32 saved_float_regs_offset;
685
  sljit_s32 fscratches = compiler->fscratches;
686
  sljit_s32 fsaveds = compiler->fsaveds;
687
#endif /* _WIN64 */
688
689
#ifdef _WIN64
690
  saved_float_regs_offset = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);
691
692
  if (saved_float_regs_offset > 0) {
693
    compiler->mode32 = 1;
694
    saved_float_regs_offset = (compiler->local_size - saved_float_regs_offset) & ~0xf;
695
696
    tmp = SLJIT_FS0 - fsaveds;
697
    for (i = SLJIT_FS0; i > tmp; i--) {
698
      FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
699
      saved_float_regs_offset += 16;
700
    }
701
702
    for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
703
      FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
704
      saved_float_regs_offset += 16;
705
    }
706
707
    compiler->mode32 = 0;
708
  }
709
#endif /* _WIN64 */
710
711
580k
  local_size = compiler->local_size;
712
713
580k
  if (is_return_to && compiler->scratches < SLJIT_FIRST_SAVED_REG && (compiler->saveds == SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
714
0
    local_size += SSIZE_OF(sw);
715
0
    is_return_to = 0;
716
0
  }
717
718
580k
  if (local_size > 0)
719
580k
    BINARY_IMM32(ADD, local_size, SLJIT_SP, 0);
720
721
580k
  tmp = compiler->scratches;
722
580k
  for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
723
0
    size = reg_map[i] >= 8 ? 2 : 1;
724
0
    inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
725
0
    FAIL_IF(!inst);
726
0
    INC_SIZE(size);
727
0
    if (reg_map[i] >= 8)
728
0
      *inst++ = REX_B;
729
0
    POP_REG(reg_lmap[i]);
730
0
  }
731
732
580k
  tmp = SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options);
733
3.48M
  for (i = SLJIT_S0 + 1 - compiler->saveds; i <= tmp; i++) {
734
2.90M
    size = reg_map[i] >= 8 ? 2 : 1;
735
2.90M
    inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
736
2.90M
    FAIL_IF(!inst);
737
2.90M
    INC_SIZE(size);
738
2.90M
    if (reg_map[i] >= 8)
739
1.74M
      *inst++ = REX_B;
740
2.90M
    POP_REG(reg_lmap[i]);
741
2.90M
  }
742
743
580k
  if (is_return_to)
744
0
    BINARY_IMM32(ADD, sizeof(sljit_sw), SLJIT_SP, 0);
745
746
580k
  return SLJIT_SUCCESS;
747
580k
}
748
749
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
750
580k
{
751
580k
  CHECK_ERROR();
752
580k
  CHECK(check_sljit_emit_return_void(compiler));
753
754
580k
  compiler->mode32 = 0;
755
756
580k
  FAIL_IF(emit_stack_frame_release(compiler, 0));
757
580k
  return emit_byte(compiler, RET_near);
758
580k
}
759
760
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
761
  sljit_s32 src, sljit_sw srcw)
762
0
{
763
0
  CHECK_ERROR();
764
0
  CHECK(check_sljit_emit_return_to(compiler, src, srcw));
765
0
766
0
  compiler->mode32 = 0;
767
0
768
0
  if ((src & SLJIT_MEM) || (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options)))) {
769
0
    ADJUST_LOCAL_OFFSET(src, srcw);
770
0
771
0
    EMIT_MOV(compiler, TMP_REG2, 0, src, srcw);
772
0
    src = TMP_REG2;
773
0
    srcw = 0;
774
0
  }
775
0
776
0
  FAIL_IF(emit_stack_frame_release(compiler, 1));
777
0
778
0
  SLJIT_SKIP_CHECKS(compiler);
779
0
  return sljit_emit_ijump(compiler, SLJIT_JUMP, src, srcw);
780
0
}
781
782
/* --------------------------------------------------------------------- */
783
/*  Call / return instructions                                           */
784
/* --------------------------------------------------------------------- */
785
786
#ifndef _WIN64
787
788
static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *src_ptr)
789
108M
{
790
108M
  sljit_s32 src = src_ptr ? (*src_ptr) : 0;
791
108M
  sljit_s32 word_arg_count = 0;
792
793
108M
  SLJIT_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R3] == 1 && reg_map[TMP_REG1] == 2);
794
108M
  SLJIT_ASSERT(!(src & SLJIT_MEM));
795
796
  /* Remove return value. */
797
108M
  arg_types >>= SLJIT_ARG_SHIFT;
798
799
423M
  while (arg_types) {
800
314M
    if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64)
801
314M
      word_arg_count++;
802
314M
    arg_types >>= SLJIT_ARG_SHIFT;
803
314M
  }
804
805
108M
  if (word_arg_count == 0)
806
0
    return SLJIT_SUCCESS;
807
808
108M
  if (word_arg_count >= 3 || src == SLJIT_R2) {
809
97.1M
    if (src == SLJIT_R2)
810
0
      *src_ptr = TMP_REG1;
811
97.1M
    EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R2, 0);
812
97.1M
  }
813
814
108M
  return emit_mov(compiler, SLJIT_R2, 0, SLJIT_R0, 0);
815
108M
}
816
817
#else
818
819
static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *src_ptr)
820
{
821
  sljit_s32 src = src_ptr ? (*src_ptr) : 0;
822
  sljit_s32 arg_count = 0;
823
  sljit_s32 word_arg_count = 0;
824
  sljit_s32 float_arg_count = 0;
825
  sljit_s32 types = 0;
826
  sljit_s32 data_trandfer = 0;
827
  static sljit_u8 word_arg_regs[5] = { 0, SLJIT_R3, SLJIT_R1, SLJIT_R2, TMP_REG1 };
828
829
  SLJIT_ASSERT(reg_map[SLJIT_R3] == 1 && reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R2] == 8 && reg_map[TMP_REG1] == 9);
830
  SLJIT_ASSERT(!(src & SLJIT_MEM));
831
832
  arg_types >>= SLJIT_ARG_SHIFT;
833
834
  while (arg_types) {
835
    types = (types << SLJIT_ARG_SHIFT) | (arg_types & SLJIT_ARG_MASK);
836
837
    switch (arg_types & SLJIT_ARG_MASK) {
838
    case SLJIT_ARG_TYPE_F64:
839
    case SLJIT_ARG_TYPE_F32:
840
      arg_count++;
841
      float_arg_count++;
842
843
      if (arg_count != float_arg_count)
844
        data_trandfer = 1;
845
      break;
846
    default:
847
      arg_count++;
848
      word_arg_count++;
849
850
      if (arg_count != word_arg_count || arg_count != word_arg_regs[arg_count]) {
851
        data_trandfer = 1;
852
853
        if (src == word_arg_regs[arg_count]) {
854
          EMIT_MOV(compiler, TMP_REG2, 0, src, 0);
855
          *src_ptr = TMP_REG2;
856
        }
857
      }
858
      break;
859
    }
860
861
    arg_types >>= SLJIT_ARG_SHIFT;
862
  }
863
864
  if (!data_trandfer)
865
    return SLJIT_SUCCESS;
866
867
  while (types) {
868
    switch (types & SLJIT_ARG_MASK) {
869
    case SLJIT_ARG_TYPE_F64:
870
      if (arg_count != float_arg_count)
871
        FAIL_IF(emit_sse2_load(compiler, 0, arg_count, float_arg_count, 0));
872
      arg_count--;
873
      float_arg_count--;
874
      break;
875
    case SLJIT_ARG_TYPE_F32:
876
      if (arg_count != float_arg_count)
877
        FAIL_IF(emit_sse2_load(compiler, 1, arg_count, float_arg_count, 0));
878
      arg_count--;
879
      float_arg_count--;
880
      break;
881
    default:
882
      if (arg_count != word_arg_count || arg_count != word_arg_regs[arg_count])
883
        EMIT_MOV(compiler, word_arg_regs[arg_count], 0, word_arg_count, 0);
884
      arg_count--;
885
      word_arg_count--;
886
      break;
887
    }
888
889
    types >>= SLJIT_ARG_SHIFT;
890
  }
891
892
  return SLJIT_SUCCESS;
893
}
894
895
#endif
896
897
/* Emits a function call to an address fixed later (a jump target).
   For the default ABI the argument values are first moved into the
   calling-convention registers; SLJIT_CALL_REG_ARG callers have already
   placed them. When SLJIT_CALL_RETURN is set, the current frame is torn
   down first and the call degenerates into a tail jump. */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types)
{
	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));

	/* Calls always operate on full 64 bit values. */
	compiler->mode32 = 0;

	/* Shuffle arguments into their ABI registers unless the caller used
	   the register-argument calling convention. */
	if ((type & 0xff) != SLJIT_CALL_REG_ARG)
		PTR_FAIL_IF(call_with_args(compiler, arg_types, NULL));

	if (type & SLJIT_CALL_RETURN) {
		/* Tail call: release the stack frame, then jump instead of call,
		   keeping only the rewritable flag. */
		PTR_FAIL_IF(emit_stack_frame_release(compiler, 0));
		type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
	}

	/* Arguments were validated by check_sljit_emit_call above. */
	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_jump(compiler, type);
}
916
917
/* Emits an indirect call through src/srcw. The target is copied into
   TMP_REG2 whenever argument shuffling or frame release could clobber it;
   SLJIT_CALL_RETURN turns the call into a tail jump after the frame is
   released. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));

	/* Addresses are full 64 bit values. */
	compiler->mode32 = 0;

	if (src & SLJIT_MEM) {
		/* Load a memory operand into TMP_REG2 before any argument
		   registers are modified below. */
		ADJUST_LOCAL_OFFSET(src, srcw);
		EMIT_MOV(compiler, TMP_REG2, 0, src, srcw);
		src = TMP_REG2;
	}

	if (type & SLJIT_CALL_RETURN) {
		/* A saved register target would be restored by the frame release,
		   so rescue it into TMP_REG2 first. */
		if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
			EMIT_MOV(compiler, TMP_REG2, 0, src, srcw);
			src = TMP_REG2;
		}

		FAIL_IF(emit_stack_frame_release(compiler, 0));
	}

	/* call_with_args may relocate src (via src ptr) if it collides with
	   an argument register. */
	if ((type & 0xff) != SLJIT_CALL_REG_ARG)
		FAIL_IF(call_with_args(compiler, arg_types, &src));

	if (type & SLJIT_CALL_RETURN)
		type = SLJIT_JUMP;

	/* Arguments were validated by check_sljit_emit_icall above. */
	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_ijump(compiler, type, src, srcw);
}
950
951
/* Pops the value on top of the machine stack into dst/dstw.
   Registers r8-r15 (reg_map >= 8) need a REX.B prefix before POP;
   memory destinations go through the generic POP r/m encoding. */
static sljit_s32 emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	sljit_u8 *inst;

	if (FAST_IS_REG(dst)) {
		/* Low registers: single-byte POP opcode. */
		if (reg_map[dst] < 8)
			return emit_byte(compiler, U8(POP_r + reg_lmap[dst]));

		/* High registers: REX.B prefix + POP. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
		FAIL_IF(!inst);
		INC_SIZE(2);
		*inst++ = REX_B;
		POP_REG(reg_lmap[dst]);
		return SLJIT_SUCCESS;
	}

	/* REX_W is not necessary (src is not immediate). */
	compiler->mode32 = 1;
	inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
	FAIL_IF(!inst);
	*inst = POP_rm;
	return SLJIT_SUCCESS;
}
974
975
/* Pushes src/srcw onto the machine stack and emits RET, so control
   transfers to the pushed address. Registers r8-r15 (reg_map >= 8)
   need a REX.B prefix before PUSH. */
static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;

	if (FAST_IS_REG(src)) {
		if (reg_map[src] < 8) {
			/* PUSH reg + RET: 1 + 1 bytes. */
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 1);
			FAIL_IF(!inst);

			INC_SIZE(1 + 1);
			PUSH_REG(reg_lmap[src]);
		}
		else {
			/* REX.B + PUSH reg + RET: 2 + 1 bytes. */
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 1);
			FAIL_IF(!inst);

			INC_SIZE(2 + 1);
			*inst++ = REX_B;
			PUSH_REG(reg_lmap[src]);
		}
	}
	else {
		/* REX_W is not necessary (src is not immediate). */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!inst);
		/* PUSH r/m is GROUP_FF with the PUSH_rm extension in ModRM. */
		inst[0] = GROUP_FF;
		inst[1] |= PUSH_rm;

		/* Reserve one byte for the RET emitted below. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
	}

	RET();
	return SLJIT_SUCCESS;
}
1012
1013
/* Loads the caller's return address into dst/dstw. The address sits on
   the stack just above the local area and the saved registers, so it is
   read from [SP + local_size + saved_regs_size]. */
static sljit_s32 sljit_emit_get_return_address(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw)
{
	sljit_s32 frame_regs_size;

	/* The return address is a full 64 bit value. */
	compiler->mode32 = 0;

	/* Size of the register save area, excluding saveds kept by the caller. */
	frame_regs_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches,
		compiler->saveds - SLJIT_KEPT_SAVEDS_COUNT(compiler->options), 0);

	return emit_mov(compiler, dst, dstw,
		SLJIT_MEM1(SLJIT_SP), compiler->local_size + frame_regs_size);
}
1022
1023
/* --------------------------------------------------------------------- */
1024
/*  Other operations                                                     */
1025
/* --------------------------------------------------------------------- */
1026
1027
/* Emits a conditional select: dst_reg = condition(type) ? src1 : src2_reg.
   Uses CMOVcc when available, otherwise a generic branch sequence.
   SLJIT_COMPARE_SELECT additionally emits the CMP that sets the flags. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_reg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2_reg)
{
	sljit_u8* inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));

	ADJUST_LOCAL_OFFSET(src1, src1w);

	/* Operand size follows the SLJIT_32 flag of the operation. */
	compiler->mode32 = type & SLJIT_32;

	if (type & SLJIT_COMPARE_SELECT) {
		/* CMP needs a register first operand; spill src1 if required. */
		if (!FAST_IS_REG(src1)) {
			EMIT_MOV(compiler, TMP_REG2, 0, src1, src1w);
			src1 = TMP_REG2;
			src1w = 0;
		}

		inst = emit_x86_instruction(compiler, 1, src1, 0, src2_reg, 0);
		FAIL_IF(!inst);
		*inst = CMP_r_rm;
	}

	type &= ~(SLJIT_32 | SLJIT_COMPARE_SELECT);

	if (dst_reg != src2_reg) {
		if (dst_reg == src1) {
			/* dst already holds src1: cmov src2 on the inverted condition.
			   (type ^ 0x1 flips a jump/condition code to its opposite.) */
			src1 = src2_reg;
			src1w = 0;
			type ^= 0x1;
		} else if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
			/* Writing dst first would corrupt src1's address; load src1
			   into dst and cmov src2 on the inverted condition instead. */
			EMIT_MOV(compiler, dst_reg, 0, src1, src1w);
			src1 = src2_reg;
			src1w = 0;
			type ^= 0x1;
		} else
			/* Pre-load the "else" value, then conditionally overwrite. */
			EMIT_MOV(compiler, dst_reg, 0, src2_reg, 0);
	}

	if (sljit_has_cpu_feature(SLJIT_HAS_CMOV)) {
		/* CMOVcc has no immediate form; materialize src1 in a register. */
		if (SLJIT_UNLIKELY(src1 == SLJIT_IMM)) {
			EMIT_MOV(compiler, TMP_REG2, 0, src1, src1w);
			src1 = TMP_REG2;
			src1w = 0;
		}

		/* CMOVcc opcode = Jcc opcode - 0x40 in the 0F group. */
		return emit_groupf(compiler, U8(get_jump_code((sljit_uw)type) - 0x40), dst_reg, src1, src1w);
	}

	/* Fallback for CPUs without CMOV: short branch around a MOV. */
	return emit_cmov_generic(compiler, type, dst_reg, src1, src1w);
}
1081
1082
/* Loads or stores a register pair (REG_PAIR_FIRST/SECOND of reg) at
   mem/memw and memw + sizeof(sljit_sw). Single registers defer to the
   unaligned helper. Care is taken so that a load never clobbers the
   base/offset register before it is used for the second access. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 reg,
	sljit_s32 mem, sljit_sw memw)
{
	sljit_u8* inst;
	sljit_s32 i, next, reg_idx;
	sljit_u8 regs[2];

	CHECK_ERROR();
	CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));

	if (!(reg & REG_PAIR_MASK))
		return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);

	ADJUST_LOCAL_OFFSET(mem, memw);

	compiler->mode32 = 0;

	if ((mem & REG_MASK) == 0) {
		/* Absolute address: materialize it in TMP_REG1. */
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, memw);

		mem = SLJIT_MEM1(TMP_REG1);
		memw = 0;
	} else if (!(mem & OFFS_REG_MASK) && ((memw < HALFWORD_MIN) || (memw > HALFWORD_MAX - SSIZE_OF(sw)))) {
		/* Displacement (plus the second slot) does not fit in 32 bits:
		   move it into TMP_REG1 and use base + index addressing. */
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, memw);

		mem = SLJIT_MEM2(mem & REG_MASK, TMP_REG1);
		memw = 0;
	}

	regs[0] = U8(REG_PAIR_FIRST(reg));
	regs[1] = U8(REG_PAIR_SECOND(reg));

	/* Default order: first register at memw, second at memw + wordsize. */
	next = SSIZE_OF(sw);

	if (!(type & SLJIT_MEM_STORE) && (regs[0] == (mem & REG_MASK) || regs[0] == OFFS_REG(mem))) {
		/* Loading into regs[0] would destroy the address; handle collisions. */
		if (regs[1] == (mem & REG_MASK) || regs[1] == OFFS_REG(mem)) {
			/* Base and offset cannot be TMP_REG1. */
			EMIT_MOV(compiler, TMP_REG1, 0, OFFS_REG(mem), 0);

			if (regs[1] == OFFS_REG(mem))
				next = -SSIZE_OF(sw);

			mem = (mem & ~OFFS_REG_MASK) | TO_OFFS_REG(TMP_REG1);
		} else {
			/* Load the second register first (reversed order). */
			next = -SSIZE_OF(sw);

			if (!(mem & OFFS_REG_MASK))
				memw += SSIZE_OF(sw);
		}
	}

	for (i = 0; i < 2; i++) {
		/* Iterate in reverse when next is negative. */
		reg_idx = next > 0 ? i : (i ^ 0x1);
		reg = regs[reg_idx];

		if ((mem & OFFS_REG_MASK) && (reg_idx == 1)) {
			/* Second slot with an index register: hand-encode
			   REX.W + MOV with SIB and an 8-bit displacement of wordsize. */
			inst = (sljit_u8*)ensure_buf(compiler, (sljit_uw)(1 + 5));
			FAIL_IF(!inst);

			INC_SIZE(5);

			inst[0] = U8(REX_W | ((reg_map[reg] >= 8) ? REX_R : 0) | ((reg_map[mem & REG_MASK] >= 8) ? REX_B : 0) | ((reg_map[OFFS_REG(mem)] >= 8) ? REX_X : 0));
			inst[1] = (type & SLJIT_MEM_STORE) ? MOV_rm_r : MOV_r_rm;
			inst[2] = 0x44 | U8(reg_lmap[reg] << 3); /* ModRM: disp8 + SIB */
			inst[3] = U8(memw << 6) | U8(reg_lmap[OFFS_REG(mem)] << 3) | reg_lmap[mem & REG_MASK]; /* SIB: scale from memw */
			inst[4] = sizeof(sljit_sw); /* disp8 = offset of the second slot */
		} else if (type & SLJIT_MEM_STORE) {
			EMIT_MOV(compiler, mem, memw, reg, 0);
		} else {
			EMIT_MOV(compiler, reg, 0, mem, memw);
		}

		/* Advance the displacement only for plain base addressing. */
		if (!(mem & OFFS_REG_MASK))
			memw += next;
	}

	return SLJIT_SUCCESS;
}
1161
1162
/* Emits a 32-bit move with sign (MOVSXD) or zero (plain 32-bit MOV)
   extension from src/srcw to dst/dstw. Immediates are truncated to
   32 bits and stored directly. */
static sljit_s32 emit_mov_int(struct sljit_compiler *compiler, sljit_s32 sign,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;

	compiler->mode32 = 0;

	if (src == SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
			/* A non-negative (or unsigned) 32-bit immediate: MOV r32, imm32
			   zero-extends, which matches both sign modes here. */
			if (!sign || ((sljit_u32)srcw <= 0x7fffffff))
				return emit_do_imm32(compiler, reg_map[dst] <= 7 ? 0 : REX_B, U8(MOV_r_i32 | reg_lmap[dst]), srcw);

			/* Negative signed immediate: REX.W MOV r/m64, imm32 sign-extends. */
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
			FAIL_IF(!inst);
			*inst = MOV_rm_i32;
			return SLJIT_SUCCESS;
		}
		/* Memory destination: store the low 32 bits only. */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_i32;
		compiler->mode32 = 0;
		return SLJIT_SUCCESS;
	}

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
		/* Register-to-memory: store src's low 32 bits directly below,
		   no extension needed. */
		dst_r = src;
	else {
		if (sign) {
			/* MOVSXD r64, r/m32: sign-extend into dst_r. */
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src, srcw);
			FAIL_IF(!inst);
			*inst = MOVSXD_r_rm;
		} else {
			/* 32-bit MOV implicitly zero-extends to 64 bits. */
			compiler->mode32 = 1;
			EMIT_MOV(compiler, dst_r, 0, src, srcw);
			compiler->mode32 = 0;
		}
	}

	if (dst & SLJIT_MEM) {
		/* Write the 32-bit result to the memory destination. */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_r;
		compiler->mode32 = 0;
	}

	return SLJIT_SUCCESS;
}
1215
1216
/* Converts an unsigned integer (U32 or UW) to a float/double.
   U32 is zero-extended into TMP_REG1 and converted directly. For UW,
   CVTSI2SD only handles signed values, so when the value's top bit is
   set it is halved with (v >> 1) | (v & 1) — the OR of the lowest bit
   keeps rounding correct — converted, then doubled by adding to itself. */
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
	sljit_u8 *inst, *jump_inst1, *jump_inst2;
	sljit_uw size1, size2;

	compiler->mode32 = 0;

	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32) {
		if (src != SLJIT_IMM) {
			/* 32-bit MOV zero-extends, making the value signed-safe. */
			compiler->mode32 = 1;
			EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
			compiler->mode32 = 0;
		} else
			FAIL_IF(emit_do_imm32(compiler, reg_map[TMP_REG1] <= 7 ? 0 : REX_B, U8(MOV_r_i32 | reg_lmap[TMP_REG1]), srcw));

		FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));

		compiler->mode32 = 1;

		if (dst_r == TMP_FREG)
			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
		return SLJIT_SUCCESS;
	}

	if (!FAST_IS_REG(src)) {
		EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
		src = TMP_REG1;
	}

	/* Branch on the sign of src: negative (top bit set as unsigned)
	   takes the slow path below. */
	BINARY_IMM32(CMP, 0, src, 0);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = JL_i8;
	jump_inst1 = inst; /* displacement patched once the fast path is emitted */

	size1 = compiler->size;

	/* Fast path: value fits in a signed 64-bit integer. */
	compiler->mode32 = 0;
	FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, src, 0));

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = JMP_i8;
	jump_inst2 = inst; /* jumps over the slow path; patched below */

	size2 = compiler->size;

	/* Patch the JL to land at the start of the slow path. */
	jump_inst1[1] = U8(size2 - size1);

	if (src != TMP_REG1)
		EMIT_MOV(compiler, TMP_REG1, 0, src, 0);

	EMIT_MOV(compiler, TMP_REG2, 0, src, 0);

	/* TMP_REG1 = src >> 1 (logical). */
	inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 1, TMP_REG1, 0);
	FAIL_IF(!inst);
	inst[1] |= SHR;

	/* TMP_REG2 = src & 1 (the rounding bit). */
	compiler->mode32 = 1;
	BINARY_IMM32(AND, 1, TMP_REG2, 0);

	/* TMP_REG1 |= TMP_REG2. */
	compiler->mode32 = 0;
	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG2, 0);
	FAIL_IF(!inst);
	inst[0] = OR_r_rm;

	/* Convert the halved value, then double it: dst_r += dst_r. */
	FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));
	compiler->mode32 = 1;
	FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, dst_r, 0));

	/* Patch the fast-path JMP to skip the slow path. */
	jump_inst2[1] = U8(compiler->size - size2);

	if (dst_r == TMP_FREG)
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	return SLJIT_SUCCESS;
}
1298
1299
/* Shared tail of sljit_emit_fset32/64: loads freg either with zero
   (PXOR freg, freg) or with the bit pattern previously placed in
   TMP_REG1 (MOVD/MOVQ freg, TMP_REG1; rex carries REX_W for 64 bits). */
static sljit_s32 sljit_emit_fset(struct sljit_compiler *compiler,
	sljit_s32 freg, sljit_u8 rex, sljit_s32 is_zero)
{
	sljit_u8 *inst;
	sljit_u32 size;

	if (is_zero) {
		/* PXOR only references freg (twice), so only REX.R/REX.B matter. */
		rex = freg_map[freg] >= 8 ? (REX_R | REX_B) : 0;
	} else {
		if (freg_map[freg] >= 8)
			rex |= REX_R;
		if (reg_map[TMP_REG1] >= 8)
			rex |= REX_B;
	}

	/* 66 [REX] 0F opcode modrm */
	size = (rex != 0) ? 5 : 4;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);

	*inst++ = GROUP_66;
	if (rex != 0)
		*inst++ = rex;
	inst[0] = GROUP_0F;

	if (is_zero) {
		/* PXOR freg, freg: any value xor itself is zero. */
		inst[1] = PXOR_x_xm;
		inst[2] = U8(freg_lmap[freg] | (freg_lmap[freg] << 3) | MOD_REG);
	} else {
		/* MOVD/MOVQ freg, TMP_REG1: transfer the prepared bits. */
		inst[1] = MOVD_x_rm;
		inst[2] = U8(reg_lmap[TMP_REG1] | (freg_lmap[freg] << 3) | MOD_REG);
	}

	return SLJIT_SUCCESS;
}
1335
1336
/* Sets freg to the single-precision constant value. The float's bit
   pattern is obtained through a union (no strict-aliasing issues),
   loaded into TMP_REG1 when non-zero, then stored by sljit_emit_fset;
   a zero pattern is produced with PXOR instead. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
	sljit_s32 freg, sljit_f32 value)
{
	union {
		sljit_s32 imm;
		sljit_f32 value;
	} u;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fset32(compiler, freg, value));

	u.value = value;

	if (u.imm != 0) {
		/* 32-bit move of the float's raw bits into TMP_REG1. */
		compiler->mode32 = 1;
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm);
	}

	return sljit_emit_fset(compiler, freg, 0, u.imm == 0);
}
1356
1357
/* Sets freg to the double-precision constant value. Same scheme as
   sljit_emit_fset32, but the bit pattern is 64 bits wide, so the move
   into TMP_REG1 and the final MOVQ (via REX_W) use full word size. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
	sljit_s32 freg, sljit_f64 value)
{
	union {
		sljit_sw imm;
		sljit_f64 value;
	} u;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fset64(compiler, freg, value));

	u.value = value;

	if (u.imm != 0) {
		/* 64-bit move of the double's raw bits into TMP_REG1. */
		compiler->mode32 = 0;
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm);
	}

	return sljit_emit_fset(compiler, freg, REX_W, u.imm == 0);
}
1377
1378
/* Copies raw bits between a general register and a float register:
   SLJIT_COPY_TO_F64 emits MOVD/MOVQ freg, reg; the other direction
   emits MOVD/MOVQ reg, freg. REX_W selects the 64-bit form. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 freg, sljit_s32 reg)
{
	sljit_u8 *inst;
	sljit_u32 size;
	sljit_u8 rex = 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));

	/* 64-bit copy needs REX.W (MOVQ). */
	if (!(op & SLJIT_32))
		rex = REX_W;

	if (freg_map[freg] >= 8)
		rex |= REX_R;

	if (reg_map[reg] >= 8)
		rex |= REX_B;

	/* 66 [REX] 0F opcode modrm */
	size = (rex != 0) ? 5 : 4;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);

	*inst++ = GROUP_66;
	if (rex != 0)
		*inst++ = rex;
	inst[0] = GROUP_0F;
	/* The two opcodes differ only in transfer direction. */
	inst[1] = GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? MOVD_x_rm : MOVD_rm_x;
	inst[2] = U8(reg_lmap[reg] | (freg_lmap[freg] << 3) | MOD_REG);

	return SLJIT_SUCCESS;
}
1412
1413
static sljit_s32 skip_frames_before_return(struct sljit_compiler *compiler)
1414
580k
{
1415
580k
  sljit_s32 tmp, size;
1416
1417
  /* Don't adjust shadow stack if it isn't enabled.  */
1418
580k
  if (!cpu_has_shadow_stack())
1419
580k
    return SLJIT_SUCCESS;
1420
1421
0
  size = compiler->local_size;
1422
0
  tmp = compiler->scratches;
1423
0
  if (tmp >= SLJIT_FIRST_SAVED_REG)
1424
0
    size += (tmp - SLJIT_FIRST_SAVED_REG + 1) * SSIZE_OF(sw);
1425
0
  tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
1426
0
  if (SLJIT_S0 >= tmp)
1427
0
    size += (SLJIT_S0 - tmp + 1) * SSIZE_OF(sw);
1428
1429
0
  return adjust_shadow_stack(compiler, SLJIT_MEM1(SLJIT_SP), size);
1430
580k
}