Coverage Report

Created: 2025-07-23 06:43

/src/gmp/mpz/powm.c
 Line |  Count | Source
    1 |        | /* mpz_powm(res,base,exp,mod) -- Set R to (U^E) mod M.
    2 |        |
    3 |        |    Contributed to the GNU project by Torbjorn Granlund.
    4 |        |
    5 |        | Copyright 1991, 1993, 1994, 1996, 1997, 2000-2002, 2005, 2008, 2009,
    6 |        | 2011, 2012, 2015, 2019 Free Software Foundation, Inc.
    7 |        |
    8 |        | This file is part of the GNU MP Library.
    9 |        |
   10 |        | The GNU MP Library is free software; you can redistribute it and/or modify
   11 |        | it under the terms of either:
   12 |        |
   13 |        |   * the GNU Lesser General Public License as published by the Free
   14 |        |     Software Foundation; either version 3 of the License, or (at your
   15 |        |     option) any later version.
   16 |        |
   17 |        | or
   18 |        |
   19 |        |   * the GNU General Public License as published by the Free Software
   20 |        |     Foundation; either version 2 of the License, or (at your option) any
   21 |        |     later version.
   22 |        |
   23 |        | or both in parallel, as here.
   24 |        |
   25 |        | The GNU MP Library is distributed in the hope that it will be useful, but
   26 |        | WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   27 |        | or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   28 |        | for more details.
   29 |        |
   30 |        | You should have received copies of the GNU General Public License and the
   31 |        | GNU Lesser General Public License along with the GNU MP Library.  If not,
   32 |        | see https://www.gnu.org/licenses/.  */
   33 |        |
   34 |        |
   35 |        | #include "gmp-impl.h"
   36 |        | #include "longlong.h"
   37 |        |
   38 |        |
   39 |        | /* TODO
   40 |        |
   41 |        |  * Improve handling of buffers.  It is pretty ugly now.
   42 |        |
   43 |        |  * For even moduli, we compute a binvert of its odd part both here and in
   44 |        |    mpn_powm.  How can we avoid this recomputation?
   45 |        | */
   46 |        |
   47 |        | /*
   48 |        |   b ^ e mod m   res
   49 |        |   0   0     0    ?
   50 |        |   0   e     0    ?
   51 |        |   0   0     m    ?
   52 |        |   0   e     m    0
   53 |        |   b   0     0    ?
   54 |        |   b   e     0    ?
   55 |        |   b   0     m    1 mod m
   56 |        |   b   e     m    b^e mod m
   57 |        | */
   58 |        |
   59 |        | #define HANDLE_NEGATIVE_EXPONENT 1
   60 |        |
   61 |        | void
   62 |        | mpz_powm (mpz_ptr r, mpz_srcptr b, mpz_srcptr e, mpz_srcptr m)
   63 |  50.4k | {
   64 |  50.4k |   mp_size_t n, nodd, ncnt;
   65 |  50.4k |   int cnt;
   66 |  50.4k |   mp_ptr rp, tp;
   67 |  50.4k |   mp_srcptr bp, ep, mp;
   68 |  50.4k |   mp_size_t rn, bn, es, en, itch;
   69 |  50.4k |   mpz_t new_b;      /* note: value lives long via 'b' */
   70 |  50.4k |   TMP_DECL;
   71 |        |
   72 |  50.4k |   n = ABSIZ(m);
   73 |  50.4k |   if (UNLIKELY (n == 0))
   74 |      0 |     DIVIDE_BY_ZERO;
   75 |        |
   76 |  50.4k |   mp = PTR(m);
   77 |        |
   78 |  50.4k |   TMP_MARK;
   79 |        |
   80 |  50.4k |   es = SIZ(e);
   81 |  50.4k |   if (UNLIKELY (es <= 0))
   82 |    108 |     {
   83 |    108 |       if (es == 0)
   84 |    108 |   {
   85 |        |     /* b^0 mod m,  b is anything and m is non-zero.
   86 |        |        Result is 1 mod m, i.e., 1 or 0 depending on if m = 1.  */
   87 |    108 |     SIZ(r) = n != 1 || mp[0] != 1;
   88 |    108 |     MPZ_NEWALLOC (r, 1)[0] = 1;
   89 |    108 |     TMP_FREE; /* we haven't really allocated anything here */
   90 |    108 |     return;
   91 |    108 |   }
   92 |      0 | #if HANDLE_NEGATIVE_EXPONENT
   93 |      0 |       MPZ_TMP_INIT (new_b, n + 1);
   94 |        |
   95 |      0 |       if (UNLIKELY (! mpz_invert (new_b, b, m)))
   96 |      0 |   DIVIDE_BY_ZERO;
   97 |      0 |       b = new_b;
   98 |      0 |       es = -es;
   99 |        | #else
  100 |        |       DIVIDE_BY_ZERO;
  101 |        | #endif
  102 |      0 |     }
  103 |  50.3k |   en = es;
  104 |        |
  105 |  50.3k |   bn = ABSIZ(b);
  106 |        |
  107 |  50.3k |   if (UNLIKELY (bn == 0))
  108 |      4 |     {
  109 |      4 |       SIZ(r) = 0;
  110 |      4 |       TMP_FREE;
  111 |      4 |       return;
  112 |      4 |     }
  113 |        |
  114 |  50.3k |   ep = PTR(e);
  115 |        |
  116 |        |   /* Handle (b^1 mod m) early, since mpn_pow* do not handle that case.  */
  117 |  50.3k |   if (UNLIKELY (en == 1 && ep[0] == 1))
  118 |     93 |     {
  119 |     93 |       rp = TMP_ALLOC_LIMBS (n);
  120 |     93 |       bp = PTR(b);
  121 |     93 |       if (bn >= n)
  122 |     42 |   {
  123 |     42 |     mp_ptr qp = TMP_ALLOC_LIMBS (bn - n + 1);
  124 |     42 |     mpn_tdiv_qr (qp, rp, 0L, bp, bn, mp, n);
  125 |     42 |     rn = n;
  126 |     42 |     MPN_NORMALIZE (rp, rn);
  127 |        |
  128 |     42 |     if (rn != 0 && SIZ(b) < 0)
  129 |      0 |       {
  130 |      0 |         mpn_sub (rp, mp, n, rp, rn);
  131 |      0 |         rn = n;
  132 |      0 |         MPN_NORMALIZE_NOT_ZERO (rp, rn);
  133 |      0 |       }
  134 |     42 |   }
  135 |     51 |       else
  136 |     51 |   {
  137 |     51 |     if (SIZ(b) < 0)
  138 |      0 |       {
  139 |      0 |         mpn_sub (rp, mp, n, bp, bn);
  140 |      0 |         rn = n;
  141 |      0 |         MPN_NORMALIZE_NOT_ZERO (rp, rn);
  142 |      0 |       }
  143 |     51 |     else
  144 |     51 |       {
  145 |     51 |         MPN_COPY (rp, bp, bn);
  146 |     51 |         rn = bn;
  147 |     51 |       }
  148 |     51 |   }
  149 |     93 |       goto ret;
  150 |     93 |     }
  151 |        |
  152 |        |   /* Remove low zero limbs from M.  This loop will terminate for correctly
  153 |        |      represented mpz numbers.  */
  154 |  50.2k |   ncnt = 0;
  155 |  50.2k |   while (UNLIKELY (mp[0] == 0))
  156 |   170k |     {
  157 |   170k |       mp++;
  158 |   170k |       ncnt++;
  159 |   170k |     }
  160 |  50.2k |   nodd = n - ncnt;
  161 |  50.2k |   cnt = 0;
  162 |  50.2k |   if (mp[0] % 2 == 0)
  163 |  9.26k |     {
  164 |  9.26k |       mp_ptr newmp = TMP_ALLOC_LIMBS (nodd);
  165 |  9.26k |       count_trailing_zeros (cnt, mp[0]);
  166 |  9.26k |       mpn_rshift (newmp, mp, nodd, cnt);
  167 |  9.26k |       nodd -= newmp[nodd - 1] == 0;
  168 |  9.26k |       mp = newmp;
  169 |  9.26k |       ncnt++;
  170 |  9.26k |     }
  171 |        |
  172 |  50.2k |   if (ncnt != 0)
  173 |  9.86k |     {
  174 |        |       /* We will call both mpn_powm and mpn_powlo.  */
  175 |        |       /* rp needs n, mpn_powlo needs 4n, the 2 mpn_binvert might need more */
  176 |  9.86k |       mp_size_t n_largest_binvert = MAX (ncnt, nodd);
  177 |  9.86k |       mp_size_t itch_binvert = mpn_binvert_itch (n_largest_binvert);
  178 |  9.86k |       itch = 3 * n + MAX (itch_binvert, 2 * n);
  179 |  9.86k |     }
  180 |  40.4k |   else
  181 |  40.4k |     {
  182 |        |       /* We will call just mpn_powm.  */
  183 |  40.4k |       mp_size_t itch_binvert = mpn_binvert_itch (nodd);
  184 |  40.4k |       itch = n + MAX (itch_binvert, 2 * n);
  185 |  40.4k |     }
  186 |  50.2k |   tp = TMP_ALLOC_LIMBS (itch);
  187 |        |
  188 |  50.2k |   rp = tp;  tp += n;
  189 |        |
  190 |  50.2k |   bp = PTR(b);
  191 |  50.2k |   mpn_powm (rp, bp, bn, ep, en, mp, nodd, tp);
  192 |        |
  193 |  50.2k |   rn = n;
  194 |        |
  195 |  50.2k |   if (ncnt != 0)
  196 |  9.86k |     {
  197 |  9.86k |       mp_ptr r2, xp, yp, odd_inv_2exp;
  198 |  9.86k |       unsigned long t;
  199 |  9.86k |       int bcnt;
  200 |        |
  201 |  9.86k |       if (bn < ncnt)
  202 |  4.98k |   {
  203 |  4.98k |     mp_ptr newbp = TMP_ALLOC_LIMBS (ncnt);
  204 |  4.98k |     MPN_COPY (newbp, bp, bn);
  205 |  4.98k |     MPN_ZERO (newbp + bn, ncnt - bn);
  206 |  4.98k |     bp = newbp;
  207 |  4.98k |   }
  208 |        |
  209 |  9.86k |       r2 = tp;
  210 |        |
  211 |  9.86k |       if (bp[0] % 2 == 0)
  212 |  6.99k |   {
  213 |  6.99k |     if (en > 1)
  214 |  5.76k |       {
  215 |  5.76k |         MPN_ZERO (r2, ncnt);
  216 |  5.76k |         goto zero;
  217 |  5.76k |       }
  218 |        |
  219 |  1.23k |     ASSERT (en == 1);
  220 |  1.23k |     t = (ncnt - (cnt != 0)) * GMP_NUMB_BITS + cnt;
  221 |        |
  222 |        |     /* Count number of low zero bits in B, up to 3.  */
  223 |  1.23k |     bcnt = (0x1213 >> ((bp[0] & 7) << 1)) & 0x3;
  224 |        |     /* Note that ep[0] * bcnt might overflow, but that just results
  225 |        |        in a missed optimization.  */
  226 |  1.23k |     if (ep[0] * bcnt >= t)
  227 |  1.20k |       {
  228 |  1.20k |         MPN_ZERO (r2, ncnt);
  229 |  1.20k |         goto zero;
  230 |  1.20k |       }
  231 |  1.23k |   }
  232 |        |
  233 |  2.89k |       mpn_powlo (r2, bp, ep, en, ncnt, tp + ncnt);
  234 |        |
  235 |  9.86k |     zero:
  236 |  9.86k |       if (nodd < ncnt)
  237 |  2.56k |   {
  238 |  2.56k |     mp_ptr newmp = TMP_ALLOC_LIMBS (ncnt);
  239 |  2.56k |     MPN_COPY (newmp, mp, nodd);
  240 |  2.56k |     MPN_ZERO (newmp + nodd, ncnt - nodd);
  241 |  2.56k |     mp = newmp;
  242 |  2.56k |   }
  243 |        |
  244 |  9.86k |       odd_inv_2exp = tp + n;
  245 |  9.86k |       mpn_binvert (odd_inv_2exp, mp, ncnt, tp + 2 * n);
  246 |        |
  247 |  9.86k |       mpn_sub (r2, r2, ncnt, rp, nodd > ncnt ? ncnt : nodd);
  248 |        |
  249 |  9.86k |       xp = tp + 2 * n;
  250 |  9.86k |       mpn_mullo_n (xp, odd_inv_2exp, r2, ncnt);
  251 |        |
  252 |  9.86k |       if (cnt != 0)
  253 |  9.26k |   xp[ncnt - 1] &= (CNST_LIMB(1) << cnt) - 1;
  254 |        |
  255 |  9.86k |       yp = tp;
  256 |  9.86k |       if (ncnt > nodd)
  257 |  2.56k |   mpn_mul (yp, xp, ncnt, mp, nodd);
  258 |  7.30k |       else
  259 |  7.30k |   mpn_mul (yp, mp, nodd, xp, ncnt);
  260 |        |
  261 |  9.86k |       mpn_add (rp, yp, n, rp, nodd);
  262 |        |
  263 |  9.86k |       ASSERT (nodd + ncnt >= n);
  264 |  9.86k |       ASSERT (nodd + ncnt <= n + 1);
  265 |  9.86k |     }
  266 |        |
  267 |  50.2k |   MPN_NORMALIZE (rp, rn);
  268 |        |
  269 |  50.2k |   if ((ep[0] & 1) && SIZ(b) < 0 && rn != 0)
  270 |      0 |     {
  271 |      0 |       mpn_sub (rp, PTR(m), n, rp, rn);
  272 |      0 |       rn = n;
  273 |      0 |       MPN_NORMALIZE (rp, rn);
  274 |      0 |     }
  275 |        |
  276 |  50.3k |  ret:
  277 |  50.3k |   MPZ_NEWALLOC (r, rn);
  278 |  50.3k |   SIZ(r) = rn;
  279 |  50.3k |   MPN_COPY (PTR(r), rp, rn);
  280 |        |
  281 |  50.3k |   TMP_FREE;
  282 |  50.3k | }
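
For readers unfamiliar with the entry point being measured, the following minimal caller (my own illustration, not taken from the report) drives mpz_powm() through the public GMP API and touches two of the paths counted above: the b^0 case, which returns 1 mod m, and an even modulus.

#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mpz_t r, b, e, m;

  mpz_init (r);
  mpz_init_set_ui (b, 7);
  mpz_init_set_ui (e, 0);
  mpz_init_set_ui (m, 12);          /* even modulus */

  mpz_powm (r, b, e, m);            /* b^0 mod m: result is 1 mod m */
  gmp_printf ("%Zd\n", r);          /* prints 1 */

  mpz_set_ui (e, 10);
  mpz_powm (r, b, e, m);            /* 7^10 mod 12 = 1, since 7^2 = 49 = 1 (mod 12) */
  gmp_printf ("%Zd\n", r);

  mpz_clears (r, b, e, m, NULL);
  return 0;
}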
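
The even-modulus branch (lines 195-265 of the listing) writes m = modd * 2^t with modd odd, raises b to the e-th power modulo each factor (mpn_powm for the odd part, mpn_powlo for the power of two), and recombines the two residues using the inverse produced by mpn_binvert. The sketch below restates that recombination with public mpz calls only; powm_even_sketch and its variable names are mine, and it omits the buffer management and early-exit shortcuts whose counts appear above.

#include <assert.h>
#include <gmp.h>

/* r <- b^e mod m for even m > 0, by CRT on m = modd * 2^t with modd odd. */
void
powm_even_sketch (mpz_t r, const mpz_t b, const mpz_t e, const mpz_t m)
{
  mpz_t modd, two_t, r1, r2, inv;
  mp_bitcnt_t t = mpz_scan1 (m, 0);     /* number of low zero bits of m */

  assert (mpz_sgn (m) > 0 && mpz_even_p (m));

  mpz_inits (modd, two_t, r1, r2, inv, NULL);
  mpz_tdiv_q_2exp (modd, m, t);         /* odd part of m */
  mpz_setbit (two_t, t);                /* 2^t */

  mpz_powm (r1, b, e, modd);            /* the residue mpn_powm computes */
  mpz_powm (r2, b, e, two_t);           /* the residue mpn_powlo computes */
  mpz_invert (inv, modd, two_t);        /* the inverse mpn_binvert computes */

  /* r = r1 + modd * ((r2 - r1) * modd^-1 mod 2^t) is b^e mod m:
     it is r1 mod modd and r2 mod 2^t, and lies in [0, modd * 2^t).  */
  mpz_sub (r, r2, r1);
  mpz_mul (r, r, inv);
  mpz_fdiv_r_2exp (r, r, t);
  mpz_mul (r, r, modd);
  mpz_add (r, r, r1);

  mpz_clears (modd, two_t, r1, r2, inv, NULL);
}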