7 #if defined HAVE_CONFIG_H
16 #undef USE_ECMULT_STATIC_PRECOMPUTATION
18 #ifndef EXHAUSTIVE_TEST_ORDER
20 #define EXHAUSTIVE_TEST_ORDER 13
21 #define EXHAUSTIVE_TEST_LAMBDA 9
29 #ifdef ENABLE_MODULE_RECOVERY
40 CHECK(secp256k1_fe_equal_var(&a->
x, &b->
x));
41 CHECK(secp256k1_fe_equal_var(&a->
y, &b->
y));
52 secp256k1_fe_sqr(&z2s, &b->
z);
53 secp256k1_fe_mul(&u1, &a->
x, &z2s);
54 u2 = b->
x; secp256k1_fe_normalize_weak(&u2);
55 secp256k1_fe_mul(&s1, &a->
y, &z2s); secp256k1_fe_mul(&s1, &s1, &b->
z);
56 s2 = b->
y; secp256k1_fe_normalize_weak(&s2);
57 CHECK(secp256k1_fe_equal_var(&u1, &u2));
58 CHECK(secp256k1_fe_equal_var(&s1, &s2));
62 unsigned char bin[32];
64 secp256k1_rand256(bin);
65 if (secp256k1_fe_set_b32(x, bin)) {
73 const unsigned char *key32,
const unsigned char *algo16,
74 void *data,
unsigned int attempt) {
87 secp256k1_scalar_set_int(&s, *idata);
88 secp256k1_scalar_get_b32(nonce32, &s);
92 #ifdef USE_ENDOMORPHISM
93 void test_exhaustive_endomorphism(
const secp256k1_ge *group,
int order) {
95 for (i = 0; i < order; i++) {
97 secp256k1_ge_mul_lambda(&res, &group[i]);
107 CHECK(secp256k1_ge_is_infinity(&group[0]));
108 CHECK(secp256k1_gej_is_infinity(&groupj[0]));
109 for (i = 1; i < order; i++) {
110 CHECK(!secp256k1_ge_is_infinity(&group[i]));
111 CHECK(!secp256k1_gej_is_infinity(&groupj[i]));
115 for (j = 0; j < order; j++) {
117 secp256k1_fe_inv(&fe_inv, &groupj[j].z);
118 for (i = 0; i < order; i++) {
122 secp256k1_gej_add_var(&tmp, &groupj[i], &groupj[j], NULL);
126 secp256k1_gej_add_ge(&tmp, &groupj[i], &group[j]);
130 secp256k1_gej_add_ge_var(&tmp, &groupj[i], &group[j], NULL);
134 zless_gej.
x = groupj[j].
x;
135 zless_gej.
y = groupj[j].
y;
136 secp256k1_gej_add_zinv_var(&tmp, &groupj[i], &zless_gej, &fe_inv);
142 for (i = 0; i < order; i++) {
145 secp256k1_gej_double_nonzero(&tmp, &groupj[i], NULL);
148 secp256k1_gej_double_var(&tmp, &groupj[i], NULL);
153 for (i = 1; i < order; i++) {
156 secp256k1_ge_neg(&tmp, &group[i]);
158 secp256k1_gej_neg(&tmpj, &groupj[i]);
165 for (r_log = 1; r_log < order; r_log++) {
166 for (j = 0; j < order; j++) {
167 for (i = 0; i < order; i++) {
170 secp256k1_scalar_set_int(&na, i);
171 secp256k1_scalar_set_int(&ng, j);
173 secp256k1_ecmult(&ctx->
ecmult_ctx, &tmp, &groupj[r_log], &na, &ng);
177 secp256k1_ecmult_const(&tmp, &group[i], &ng, 256);
200 for (i = 0; i < order; i++) {
201 for (j = 0; j < order; j++) {
202 for (k = 0; k < order; k++) {
203 for (x = 0; x < order; x++) {
204 for (y = 0; y < order; y++) {
209 secp256k1_scalar_set_int(&data.
sc[0], i);
210 secp256k1_scalar_set_int(&data.
sc[1], j);
211 secp256k1_scalar_set_int(&g_sc, k);
212 data.
pt[0] = group[x];
213 data.
pt[1] = group[y];
215 secp256k1_ecmult_multi_var(&ctx->
ecmult_ctx, scratch, &tmp, &g_sc, ecmult_multi_callback, &data, 2);
222 secp256k1_scratch_destroy(scratch);
227 unsigned char x_bin[32];
230 secp256k1_fe_normalize(&x);
231 secp256k1_fe_get_b32(x_bin, &x);
232 secp256k1_scalar_set_b32(r, x_bin, NULL);
237 for (s = 1; s < order; s++) {
238 for (r = 1; r < order; r++) {
239 for (msg = 1; msg < order; msg++) {
240 for (key = 1; key < order; key++) {
246 int k, should_verify;
247 unsigned char msg32[32];
249 secp256k1_scalar_set_int(&s_s, s);
250 secp256k1_scalar_set_int(&r_s, r);
251 secp256k1_scalar_set_int(&msg_s, msg);
252 secp256k1_scalar_set_int(&sk_s, key);
258 for (k = 0; k < order; k++) {
261 if (r_s == check_x_s) {
262 secp256k1_scalar_set_int(&s_times_k_s, k);
263 secp256k1_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s);
264 secp256k1_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s);
265 secp256k1_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s);
266 should_verify |= secp256k1_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s);
270 should_verify &= !secp256k1_scalar_is_high(&s_s);
273 secp256k1_ecdsa_signature_save(&sig, &r_s, &s_s);
274 memcpy(&nonconst_ge, &group[sk_s],
sizeof(nonconst_ge));
275 secp256k1_pubkey_save(&pk, &nonconst_ge);
276 secp256k1_scalar_get_b32(msg32, &msg_s);
277 CHECK(should_verify ==
289 for (i = 1; i < order; i++) {
290 for (j = 1; j < order; j++) {
291 for (k = 1; k < order; k++) {
292 const int starting_k = k;
295 unsigned char sk32[32], msg32[32];
296 secp256k1_scalar_set_int(&msg, i);
297 secp256k1_scalar_set_int(&sk, j);
298 secp256k1_scalar_get_b32(sk32, &sk);
299 secp256k1_scalar_get_b32(msg32, &msg);
303 secp256k1_ecdsa_signature_load(ctx, &r, &s, &sig);
308 CHECK(r == expected_r);
309 CHECK((k * s) % order == (i + r * j) % order ||
313 if (k < starting_k) {
330 #ifdef ENABLE_MODULE_RECOVERY
335 for (i = 1; i < order; i++) {
336 for (j = 1; j < order; j++) {
337 for (k = 1; k < order; k++) {
338 const int starting_k = k;
343 unsigned char sk32[32], msg32[32];
346 secp256k1_scalar_set_int(&msg, i);
347 secp256k1_scalar_set_int(&sk, j);
348 secp256k1_scalar_get_b32(sk32, &sk);
349 secp256k1_scalar_get_b32(msg32, &msg);
354 secp256k1_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, &rsig);
356 CHECK(r == expected_r);
357 CHECK((k * s) % order == (i + r * j) % order ||
364 r_dot_y_normalized = group[k].
y;
365 secp256k1_fe_normalize(&r_dot_y_normalized);
367 if ((k * s) % order == (i + r * j) % order) {
368 expected_recid = secp256k1_fe_is_odd(&r_dot_y_normalized) ? 1 : 0;
370 expected_recid = secp256k1_fe_is_odd(&r_dot_y_normalized) ? 0 : 1;
372 CHECK(recid == expected_recid);
376 secp256k1_ecdsa_signature_load(ctx, &r, &s, &sig);
381 CHECK(r == expected_r);
382 CHECK((k * s) % order == (i + r * j) % order ||
386 if (k < starting_k) {
397 for (s = 1; s < order; s++) {
398 for (r = 1; r < order; r++) {
399 for (msg = 1; msg < order; msg++) {
400 for (key = 1; key < order; key++) {
408 int k, should_verify;
409 unsigned char msg32[32];
411 secp256k1_scalar_set_int(&s_s, s);
412 secp256k1_scalar_set_int(&r_s, r);
413 secp256k1_scalar_set_int(&msg_s, msg);
414 secp256k1_scalar_set_int(&sk_s, key);
415 secp256k1_scalar_get_b32(msg32, &msg_s);
421 for (k = 0; k < order; k++) {
424 if (r_s == check_x_s) {
425 secp256k1_scalar_set_int(&s_times_k_s, k);
426 secp256k1_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s);
427 secp256k1_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s);
428 secp256k1_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s);
429 should_verify |= secp256k1_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s);
433 should_verify &= !secp256k1_scalar_is_high(&s_s);
441 secp256k1_ecdsa_recoverable_signature_save(&rsig, &r_s, &s_s, recid);
443 memcpy(&nonconst_ge, &group[sk_s],
sizeof(nonconst_ge));
444 secp256k1_pubkey_save(&pk, &nonconst_ge);
445 CHECK(should_verify ==
465 secp256k1_gej_set_infinity(&groupj[0]);
466 secp256k1_ge_set_gej(&group[0], &groupj[0]);
472 secp256k1_gej_add_ge(&groupj[i], &groupj[i - 1], &secp256k1_ge_const_g);
473 secp256k1_ge_set_gej(&group[i], &groupj[i]);
474 secp256k1_gej_rescale(&groupj[i], &z);
482 secp256k1_scalar_set_int(&scalar_i, i);
483 secp256k1_ecmult_gen(&ctx->
ecmult_gen_ctx, &generatedj, &scalar_i);
484 secp256k1_ge_set_gej(&generated, &generatedj);
486 CHECK(group[i].infinity == 0);
488 CHECK(secp256k1_fe_equal_var(&generated.
x, &group[i].
x));
489 CHECK(secp256k1_fe_equal_var(&generated.
y, &group[i].
y));
494 #ifdef USE_ENDOMORPHISM
503 #ifdef ENABLE_MODULE_RECOVERY
void * memcpy(void *a, const void *b, size_t c)
#define SECP256K1_CONTEXT_SIGN
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int secp256k1_ecdsa_verify(const secp256k1_context *ctx, const secp256k1_ecdsa_signature *sig, const unsigned char *msg32, const secp256k1_pubkey *pubkey) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4)
Verify an ECDSA signature.
SECP256K1_API int secp256k1_ecdsa_sign(const secp256k1_context *ctx, secp256k1_ecdsa_signature *sig, const unsigned char *msg32, const unsigned char *seckey, secp256k1_nonce_function noncefp, const void *ndata) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4)
Create an ECDSA signature.
SECP256K1_API secp256k1_context * secp256k1_context_create(unsigned int flags) SECP256K1_WARN_UNUSED_RESULT
Create a secp256k1 context object.
#define SECP256K1_CONTEXT_VERIFY
Flags to pass to secp256k1_context_create.
SECP256K1_API void secp256k1_context_destroy(secp256k1_context *ctx)
Destroy a secp256k1 context object.
SECP256K1_API int secp256k1_ecdsa_sign_recoverable(const secp256k1_context *ctx, secp256k1_ecdsa_recoverable_signature *sig, const unsigned char *msg32, const unsigned char *seckey, secp256k1_nonce_function noncefp, const void *ndata) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4)
Create a recoverable ECDSA signature.
SECP256K1_API int secp256k1_ecdsa_recoverable_signature_convert(const secp256k1_context *ctx, secp256k1_ecdsa_signature *sig, const secp256k1_ecdsa_recoverable_signature *sigin) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3)
Convert a recoverable signature into a normal signature.
secp256k1_callback error_callback
secp256k1_ecmult_gen_context ecmult_gen_ctx
secp256k1_ecmult_context ecmult_ctx
Opaque data structure that holds a parsed ECDSA signature, supporting pubkey recovery.
Opaque data structure that holds a parsed ECDSA signature.
A group element of the secp256k1 curve, in affine coordinates.
A group element of the secp256k1 curve, in jacobian coordinates.
Opaque data structure that holds a parsed and valid public key.
A scalar modulo the group order of the secp256k1 curve.
void test_exhaustive_sign(const secp256k1_context *ctx, const secp256k1_ge *group, int order)
void test_exhaustive_ecmult_multi(const secp256k1_context *ctx, const secp256k1_ge *group, int order)
int secp256k1_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int attempt)
END stolen from tests.c.
void r_from_k(secp256k1_scalar *r, const secp256k1_ge *group, int k)
#define EXHAUSTIVE_TEST_LAMBDA
void ge_equals_ge(const secp256k1_ge *a, const secp256k1_ge *b)
stolen from tests.c
void ge_equals_gej(const secp256k1_ge *a, const secp256k1_gej *b)
void test_exhaustive_ecmult(const secp256k1_context *ctx, const secp256k1_ge *group, const secp256k1_gej *groupj, int order)
void test_exhaustive_addition(const secp256k1_ge *group, const secp256k1_gej *groupj, int order)
void test_exhaustive_verify(const secp256k1_context *ctx, const secp256k1_ge *group, int order)
void random_fe(secp256k1_fe *x)
#define EXHAUSTIVE_TEST_ORDER