/* Bitcoin Core (libsecp256k1) -- field_5x52_int128_impl.h */
/***********************************************************************
 * Copyright (c) 2013, 2014 Pieter Wuille                              *
 * Distributed under the MIT software license, see the accompanying    *
 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
 ***********************************************************************/
6 
7 #ifndef SECP256K1_FIELD_INNER5X52_IMPL_H
8 #define SECP256K1_FIELD_INNER5X52_IMPL_H
9 
10 #include <stdint.h>
11 
12 #include "int128.h"
13 
14 #ifdef VERIFY
15 #define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
16 #define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))
17 #else
18 #define VERIFY_BITS(x, n) do { } while(0)
19 #define VERIFY_BITS_128(x, n) do { } while(0)
20 #endif
21 
22 SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
23  secp256k1_uint128 c, d;
24  uint64_t t3, t4, tx, u0;
25  uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
26  const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
27 
28  VERIFY_BITS(a[0], 56);
29  VERIFY_BITS(a[1], 56);
30  VERIFY_BITS(a[2], 56);
31  VERIFY_BITS(a[3], 56);
32  VERIFY_BITS(a[4], 52);
33  VERIFY_BITS(b[0], 56);
34  VERIFY_BITS(b[1], 56);
35  VERIFY_BITS(b[2], 56);
36  VERIFY_BITS(b[3], 56);
37  VERIFY_BITS(b[4], 52);
38  VERIFY_CHECK(r != b);
39  VERIFY_CHECK(a != b);
40 
41  /* [... a b c] is a shorthand for ... + a<<104 + b<<52 + c<<0 mod n.
42  * for 0 <= x <= 4, px is a shorthand for sum(a[i]*b[x-i], i=0..x).
43  * for 4 <= x <= 8, px is a shorthand for sum(a[i]*b[x-i], i=(x-4)..4)
44  * Note that [x 0 0 0 0 0] = [x*R].
45  */
46 
47  secp256k1_u128_mul(&d, a0, b[3]);
48  secp256k1_u128_accum_mul(&d, a1, b[2]);
49  secp256k1_u128_accum_mul(&d, a2, b[1]);
50  secp256k1_u128_accum_mul(&d, a3, b[0]);
51  VERIFY_BITS_128(&d, 114);
52  /* [d 0 0 0] = [p3 0 0 0] */
53  secp256k1_u128_mul(&c, a4, b[4]);
54  VERIFY_BITS_128(&c, 112);
55  /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
57  VERIFY_BITS_128(&d, 115);
58  VERIFY_BITS_128(&c, 48);
59  /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
60  t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
61  VERIFY_BITS(t3, 52);
62  VERIFY_BITS_128(&d, 63);
63  /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
64 
65  secp256k1_u128_accum_mul(&d, a0, b[4]);
66  secp256k1_u128_accum_mul(&d, a1, b[3]);
67  secp256k1_u128_accum_mul(&d, a2, b[2]);
68  secp256k1_u128_accum_mul(&d, a3, b[1]);
69  secp256k1_u128_accum_mul(&d, a4, b[0]);
70  VERIFY_BITS_128(&d, 115);
71  /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
73  VERIFY_BITS_128(&d, 116);
74  /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
75  t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
76  VERIFY_BITS(t4, 52);
77  VERIFY_BITS_128(&d, 64);
78  /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
79  tx = (t4 >> 48); t4 &= (M >> 4);
80  VERIFY_BITS(tx, 4);
81  VERIFY_BITS(t4, 48);
82  /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
83 
84  secp256k1_u128_mul(&c, a0, b[0]);
85  VERIFY_BITS_128(&c, 112);
86  /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
87  secp256k1_u128_accum_mul(&d, a1, b[4]);
88  secp256k1_u128_accum_mul(&d, a2, b[3]);
89  secp256k1_u128_accum_mul(&d, a3, b[2]);
90  secp256k1_u128_accum_mul(&d, a4, b[1]);
91  VERIFY_BITS_128(&d, 115);
92  /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
93  u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
94  VERIFY_BITS(u0, 52);
95  VERIFY_BITS_128(&d, 63);
96  /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
97  /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
98  u0 = (u0 << 4) | tx;
99  VERIFY_BITS(u0, 56);
100  /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
101  secp256k1_u128_accum_mul(&c, u0, R >> 4);
102  VERIFY_BITS_128(&c, 115);
103  /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
104  r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
105  VERIFY_BITS(r[0], 52);
106  VERIFY_BITS_128(&c, 61);
107  /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
108 
109  secp256k1_u128_accum_mul(&c, a0, b[1]);
110  secp256k1_u128_accum_mul(&c, a1, b[0]);
111  VERIFY_BITS_128(&c, 114);
112  /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
113  secp256k1_u128_accum_mul(&d, a2, b[4]);
114  secp256k1_u128_accum_mul(&d, a3, b[3]);
115  secp256k1_u128_accum_mul(&d, a4, b[2]);
116  VERIFY_BITS_128(&d, 114);
117  /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
119  VERIFY_BITS_128(&c, 115);
120  VERIFY_BITS_128(&d, 62);
121  /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
122  r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
123  VERIFY_BITS(r[1], 52);
124  VERIFY_BITS_128(&c, 63);
125  /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
126 
127  secp256k1_u128_accum_mul(&c, a0, b[2]);
128  secp256k1_u128_accum_mul(&c, a1, b[1]);
129  secp256k1_u128_accum_mul(&c, a2, b[0]);
130  VERIFY_BITS_128(&c, 114);
131  /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
132  secp256k1_u128_accum_mul(&d, a3, b[4]);
133  secp256k1_u128_accum_mul(&d, a4, b[3]);
134  VERIFY_BITS_128(&d, 114);
135  /* [d 0 0 t4 t3 c t1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
137  VERIFY_BITS_128(&c, 115);
138  VERIFY_BITS_128(&d, 50);
139  /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
140 
141  r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
142  VERIFY_BITS(r[2], 52);
143  VERIFY_BITS_128(&c, 63);
144  /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
146  secp256k1_u128_accum_u64(&c, t3);
147  VERIFY_BITS_128(&c, 100);
148  /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
149  r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
150  VERIFY_BITS(r[3], 52);
151  VERIFY_BITS_128(&c, 48);
152  /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
153  r[4] = secp256k1_u128_to_u64(&c) + t4;
154  VERIFY_BITS(r[4], 49);
155  /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
156 }
157 
158 SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
159  secp256k1_uint128 c, d;
160  uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
161  int64_t t3, t4, tx, u0;
162  const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
163 
164  VERIFY_BITS(a[0], 56);
165  VERIFY_BITS(a[1], 56);
166  VERIFY_BITS(a[2], 56);
167  VERIFY_BITS(a[3], 56);
168  VERIFY_BITS(a[4], 52);
169 
175  secp256k1_u128_mul(&d, a0*2, a3);
176  secp256k1_u128_accum_mul(&d, a1*2, a2);
177  VERIFY_BITS_128(&d, 114);
178  /* [d 0 0 0] = [p3 0 0 0] */
179  secp256k1_u128_mul(&c, a4, a4);
180  VERIFY_BITS_128(&c, 112);
181  /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
183  VERIFY_BITS_128(&d, 115);
184  VERIFY_BITS_128(&c, 48);
185  /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
186  t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
187  VERIFY_BITS(t3, 52);
188  VERIFY_BITS_128(&d, 63);
189  /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
190 
191  a4 *= 2;
192  secp256k1_u128_accum_mul(&d, a0, a4);
193  secp256k1_u128_accum_mul(&d, a1*2, a3);
194  secp256k1_u128_accum_mul(&d, a2, a2);
195  VERIFY_BITS_128(&d, 115);
196  /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
198  VERIFY_BITS_128(&d, 116);
199  /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
200  t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
201  VERIFY_BITS(t4, 52);
202  VERIFY_BITS_128(&d, 64);
203  /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
204  tx = (t4 >> 48); t4 &= (M >> 4);
205  VERIFY_BITS(tx, 4);
206  VERIFY_BITS(t4, 48);
207  /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
208 
209  secp256k1_u128_mul(&c, a0, a0);
210  VERIFY_BITS_128(&c, 112);
211  /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
212  secp256k1_u128_accum_mul(&d, a1, a4);
213  secp256k1_u128_accum_mul(&d, a2*2, a3);
214  VERIFY_BITS_128(&d, 114);
215  /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
216  u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
217  VERIFY_BITS(u0, 52);
218  VERIFY_BITS_128(&d, 62);
219  /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
220  /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
221  u0 = (u0 << 4) | tx;
222  VERIFY_BITS(u0, 56);
223  /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
224  secp256k1_u128_accum_mul(&c, u0, R >> 4);
225  VERIFY_BITS_128(&c, 113);
226  /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
227  r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
228  VERIFY_BITS(r[0], 52);
229  VERIFY_BITS_128(&c, 61);
230  /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
231 
232  a0 *= 2;
233  secp256k1_u128_accum_mul(&c, a0, a1);
234  VERIFY_BITS_128(&c, 114);
235  /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
236  secp256k1_u128_accum_mul(&d, a2, a4);
237  secp256k1_u128_accum_mul(&d, a3, a3);
238  VERIFY_BITS_128(&d, 114);
239  /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
241  VERIFY_BITS_128(&c, 115);
242  VERIFY_BITS_128(&d, 62);
243  /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
244  r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
245  VERIFY_BITS(r[1], 52);
246  VERIFY_BITS_128(&c, 63);
247  /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
248 
249  secp256k1_u128_accum_mul(&c, a0, a2);
250  secp256k1_u128_accum_mul(&c, a1, a1);
251  VERIFY_BITS_128(&c, 114);
252  /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
253  secp256k1_u128_accum_mul(&d, a3, a4);
254  VERIFY_BITS_128(&d, 114);
255  /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
257  VERIFY_BITS_128(&c, 115);
258  VERIFY_BITS_128(&d, 50);
259  /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
260  r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
261  VERIFY_BITS(r[2], 52);
262  VERIFY_BITS_128(&c, 63);
263  /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
264 
266  secp256k1_u128_accum_u64(&c, t3);
267  VERIFY_BITS_128(&c, 100);
268  /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
269  r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
270  VERIFY_BITS(r[3], 52);
271  VERIFY_BITS_128(&c, 48);
272  /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
273  r[4] = secp256k1_u128_to_u64(&c) + t4;
274  VERIFY_BITS(r[4], 49);
275  /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
276 }
277 
#endif /* SECP256K1_FIELD_INNER5X52_IMPL_H */
/* Doxygen cross-reference residue (not compilable source). The names used
 * above are defined elsewhere in the project:
 *   secp256k1_u128_mul / secp256k1_u128_accum_mul / secp256k1_u128_accum_u64 /
 *   secp256k1_u128_rshift / secp256k1_u128_to_u64 -- int128 implementation
 *   VERIFY_CHECK        -- util.h:100
 *   SECP256K1_RESTRICT  -- util.h:160
 *   SECP256K1_INLINE    -- secp256k1.h:131
 */