Replace NaCl & SHA methods with versions from libsodium to avoid undefined behaviour

Jeremy Lakeman 2016-01-25 17:10:42 +10:30
parent 7b6f167b9b
commit f2e2b4acb5
15 changed files with 1044 additions and 2026 deletions
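
The bulk of the diff below makes two mechanical changes: the NaCl-style crypto_int32/crypto_int64 typedefs are replaced with the standard <stdint.h> types (with explicit (int32_t) casts on the final limb stores), and every carry step that left-shifted a signed value is rewritten as a multiplication by an unsigned power of two, because left-shifting a negative signed integer is undefined behaviour in C. A minimal sketch of the rewritten carry pattern, not code from the patch (the helper name and parameters are illustrative, and like the original it relies on arithmetic right shift of negative values):

    #include <stdint.h>

    /* One carry step in the new style: round the limb to the nearest
       multiple of 2^26, push the quotient into the next limb, and remove
       it from this one by multiplying with an unsigned power of two
       instead of left-shifting a possibly negative carry. */
    static void carry_step(int64_t *lo, int64_t *hi)
    {
        int64_t carry = (*lo + (int64_t) (1L << 25)) >> 26;
        *hi += carry;
        *lo -= carry * ((uint64_t) 1L << 26);
    }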


@@ -1,5 +1,5 @@
#include "fe.h" #include "fe.h"
#include "crypto_int64.h" #include <stdint.h>
/* /*
h = f * g h = f * g
@@ -33,162 +33,163 @@ Can get away with 11 carries, but then data flow is much deeper.
With tighter constraints on inputs can squeeze carries into int32. With tighter constraints on inputs can squeeze carries into int32.
*/ */
void fe_mul(fe h,const fe f,const fe g) void fe_mul(fe h,const fe f,const fe g)
{ {
crypto_int32 f0 = f[0]; int32_t f0 = f[0];
crypto_int32 f1 = f[1]; int32_t f1 = f[1];
crypto_int32 f2 = f[2]; int32_t f2 = f[2];
crypto_int32 f3 = f[3]; int32_t f3 = f[3];
crypto_int32 f4 = f[4]; int32_t f4 = f[4];
crypto_int32 f5 = f[5]; int32_t f5 = f[5];
crypto_int32 f6 = f[6]; int32_t f6 = f[6];
crypto_int32 f7 = f[7]; int32_t f7 = f[7];
crypto_int32 f8 = f[8]; int32_t f8 = f[8];
crypto_int32 f9 = f[9]; int32_t f9 = f[9];
crypto_int32 g0 = g[0]; int32_t g0 = g[0];
crypto_int32 g1 = g[1]; int32_t g1 = g[1];
crypto_int32 g2 = g[2]; int32_t g2 = g[2];
crypto_int32 g3 = g[3]; int32_t g3 = g[3];
crypto_int32 g4 = g[4]; int32_t g4 = g[4];
crypto_int32 g5 = g[5]; int32_t g5 = g[5];
crypto_int32 g6 = g[6]; int32_t g6 = g[6];
crypto_int32 g7 = g[7]; int32_t g7 = g[7];
crypto_int32 g8 = g[8]; int32_t g8 = g[8];
crypto_int32 g9 = g[9]; int32_t g9 = g[9];
crypto_int32 g1_19 = 19 * g1; /* 1.959375*2^29 */ int32_t g1_19 = 19 * g1; /* 1.959375*2^29 */
crypto_int32 g2_19 = 19 * g2; /* 1.959375*2^30; still ok */ int32_t g2_19 = 19 * g2; /* 1.959375*2^30; still ok */
crypto_int32 g3_19 = 19 * g3; int32_t g3_19 = 19 * g3;
crypto_int32 g4_19 = 19 * g4; int32_t g4_19 = 19 * g4;
crypto_int32 g5_19 = 19 * g5; int32_t g5_19 = 19 * g5;
crypto_int32 g6_19 = 19 * g6; int32_t g6_19 = 19 * g6;
crypto_int32 g7_19 = 19 * g7; int32_t g7_19 = 19 * g7;
crypto_int32 g8_19 = 19 * g8; int32_t g8_19 = 19 * g8;
crypto_int32 g9_19 = 19 * g9; int32_t g9_19 = 19 * g9;
crypto_int32 f1_2 = 2 * f1; int32_t f1_2 = 2 * f1;
crypto_int32 f3_2 = 2 * f3; int32_t f3_2 = 2 * f3;
crypto_int32 f5_2 = 2 * f5; int32_t f5_2 = 2 * f5;
crypto_int32 f7_2 = 2 * f7; int32_t f7_2 = 2 * f7;
crypto_int32 f9_2 = 2 * f9; int32_t f9_2 = 2 * f9;
crypto_int64 f0g0 = f0 * (crypto_int64) g0; int64_t f0g0 = f0 * (int64_t) g0;
crypto_int64 f0g1 = f0 * (crypto_int64) g1; int64_t f0g1 = f0 * (int64_t) g1;
crypto_int64 f0g2 = f0 * (crypto_int64) g2; int64_t f0g2 = f0 * (int64_t) g2;
crypto_int64 f0g3 = f0 * (crypto_int64) g3; int64_t f0g3 = f0 * (int64_t) g3;
crypto_int64 f0g4 = f0 * (crypto_int64) g4; int64_t f0g4 = f0 * (int64_t) g4;
crypto_int64 f0g5 = f0 * (crypto_int64) g5; int64_t f0g5 = f0 * (int64_t) g5;
crypto_int64 f0g6 = f0 * (crypto_int64) g6; int64_t f0g6 = f0 * (int64_t) g6;
crypto_int64 f0g7 = f0 * (crypto_int64) g7; int64_t f0g7 = f0 * (int64_t) g7;
crypto_int64 f0g8 = f0 * (crypto_int64) g8; int64_t f0g8 = f0 * (int64_t) g8;
crypto_int64 f0g9 = f0 * (crypto_int64) g9; int64_t f0g9 = f0 * (int64_t) g9;
crypto_int64 f1g0 = f1 * (crypto_int64) g0; int64_t f1g0 = f1 * (int64_t) g0;
crypto_int64 f1g1_2 = f1_2 * (crypto_int64) g1; int64_t f1g1_2 = f1_2 * (int64_t) g1;
crypto_int64 f1g2 = f1 * (crypto_int64) g2; int64_t f1g2 = f1 * (int64_t) g2;
crypto_int64 f1g3_2 = f1_2 * (crypto_int64) g3; int64_t f1g3_2 = f1_2 * (int64_t) g3;
crypto_int64 f1g4 = f1 * (crypto_int64) g4; int64_t f1g4 = f1 * (int64_t) g4;
crypto_int64 f1g5_2 = f1_2 * (crypto_int64) g5; int64_t f1g5_2 = f1_2 * (int64_t) g5;
crypto_int64 f1g6 = f1 * (crypto_int64) g6; int64_t f1g6 = f1 * (int64_t) g6;
crypto_int64 f1g7_2 = f1_2 * (crypto_int64) g7; int64_t f1g7_2 = f1_2 * (int64_t) g7;
crypto_int64 f1g8 = f1 * (crypto_int64) g8; int64_t f1g8 = f1 * (int64_t) g8;
crypto_int64 f1g9_38 = f1_2 * (crypto_int64) g9_19; int64_t f1g9_38 = f1_2 * (int64_t) g9_19;
crypto_int64 f2g0 = f2 * (crypto_int64) g0; int64_t f2g0 = f2 * (int64_t) g0;
crypto_int64 f2g1 = f2 * (crypto_int64) g1; int64_t f2g1 = f2 * (int64_t) g1;
crypto_int64 f2g2 = f2 * (crypto_int64) g2; int64_t f2g2 = f2 * (int64_t) g2;
crypto_int64 f2g3 = f2 * (crypto_int64) g3; int64_t f2g3 = f2 * (int64_t) g3;
crypto_int64 f2g4 = f2 * (crypto_int64) g4; int64_t f2g4 = f2 * (int64_t) g4;
crypto_int64 f2g5 = f2 * (crypto_int64) g5; int64_t f2g5 = f2 * (int64_t) g5;
crypto_int64 f2g6 = f2 * (crypto_int64) g6; int64_t f2g6 = f2 * (int64_t) g6;
crypto_int64 f2g7 = f2 * (crypto_int64) g7; int64_t f2g7 = f2 * (int64_t) g7;
crypto_int64 f2g8_19 = f2 * (crypto_int64) g8_19; int64_t f2g8_19 = f2 * (int64_t) g8_19;
crypto_int64 f2g9_19 = f2 * (crypto_int64) g9_19; int64_t f2g9_19 = f2 * (int64_t) g9_19;
crypto_int64 f3g0 = f3 * (crypto_int64) g0; int64_t f3g0 = f3 * (int64_t) g0;
crypto_int64 f3g1_2 = f3_2 * (crypto_int64) g1; int64_t f3g1_2 = f3_2 * (int64_t) g1;
crypto_int64 f3g2 = f3 * (crypto_int64) g2; int64_t f3g2 = f3 * (int64_t) g2;
crypto_int64 f3g3_2 = f3_2 * (crypto_int64) g3; int64_t f3g3_2 = f3_2 * (int64_t) g3;
crypto_int64 f3g4 = f3 * (crypto_int64) g4; int64_t f3g4 = f3 * (int64_t) g4;
crypto_int64 f3g5_2 = f3_2 * (crypto_int64) g5; int64_t f3g5_2 = f3_2 * (int64_t) g5;
crypto_int64 f3g6 = f3 * (crypto_int64) g6; int64_t f3g6 = f3 * (int64_t) g6;
crypto_int64 f3g7_38 = f3_2 * (crypto_int64) g7_19; int64_t f3g7_38 = f3_2 * (int64_t) g7_19;
crypto_int64 f3g8_19 = f3 * (crypto_int64) g8_19; int64_t f3g8_19 = f3 * (int64_t) g8_19;
crypto_int64 f3g9_38 = f3_2 * (crypto_int64) g9_19; int64_t f3g9_38 = f3_2 * (int64_t) g9_19;
crypto_int64 f4g0 = f4 * (crypto_int64) g0; int64_t f4g0 = f4 * (int64_t) g0;
crypto_int64 f4g1 = f4 * (crypto_int64) g1; int64_t f4g1 = f4 * (int64_t) g1;
crypto_int64 f4g2 = f4 * (crypto_int64) g2; int64_t f4g2 = f4 * (int64_t) g2;
crypto_int64 f4g3 = f4 * (crypto_int64) g3; int64_t f4g3 = f4 * (int64_t) g3;
crypto_int64 f4g4 = f4 * (crypto_int64) g4; int64_t f4g4 = f4 * (int64_t) g4;
crypto_int64 f4g5 = f4 * (crypto_int64) g5; int64_t f4g5 = f4 * (int64_t) g5;
crypto_int64 f4g6_19 = f4 * (crypto_int64) g6_19; int64_t f4g6_19 = f4 * (int64_t) g6_19;
crypto_int64 f4g7_19 = f4 * (crypto_int64) g7_19; int64_t f4g7_19 = f4 * (int64_t) g7_19;
crypto_int64 f4g8_19 = f4 * (crypto_int64) g8_19; int64_t f4g8_19 = f4 * (int64_t) g8_19;
crypto_int64 f4g9_19 = f4 * (crypto_int64) g9_19; int64_t f4g9_19 = f4 * (int64_t) g9_19;
crypto_int64 f5g0 = f5 * (crypto_int64) g0; int64_t f5g0 = f5 * (int64_t) g0;
crypto_int64 f5g1_2 = f5_2 * (crypto_int64) g1; int64_t f5g1_2 = f5_2 * (int64_t) g1;
crypto_int64 f5g2 = f5 * (crypto_int64) g2; int64_t f5g2 = f5 * (int64_t) g2;
crypto_int64 f5g3_2 = f5_2 * (crypto_int64) g3; int64_t f5g3_2 = f5_2 * (int64_t) g3;
crypto_int64 f5g4 = f5 * (crypto_int64) g4; int64_t f5g4 = f5 * (int64_t) g4;
crypto_int64 f5g5_38 = f5_2 * (crypto_int64) g5_19; int64_t f5g5_38 = f5_2 * (int64_t) g5_19;
crypto_int64 f5g6_19 = f5 * (crypto_int64) g6_19; int64_t f5g6_19 = f5 * (int64_t) g6_19;
crypto_int64 f5g7_38 = f5_2 * (crypto_int64) g7_19; int64_t f5g7_38 = f5_2 * (int64_t) g7_19;
crypto_int64 f5g8_19 = f5 * (crypto_int64) g8_19; int64_t f5g8_19 = f5 * (int64_t) g8_19;
crypto_int64 f5g9_38 = f5_2 * (crypto_int64) g9_19; int64_t f5g9_38 = f5_2 * (int64_t) g9_19;
crypto_int64 f6g0 = f6 * (crypto_int64) g0; int64_t f6g0 = f6 * (int64_t) g0;
crypto_int64 f6g1 = f6 * (crypto_int64) g1; int64_t f6g1 = f6 * (int64_t) g1;
crypto_int64 f6g2 = f6 * (crypto_int64) g2; int64_t f6g2 = f6 * (int64_t) g2;
crypto_int64 f6g3 = f6 * (crypto_int64) g3; int64_t f6g3 = f6 * (int64_t) g3;
crypto_int64 f6g4_19 = f6 * (crypto_int64) g4_19; int64_t f6g4_19 = f6 * (int64_t) g4_19;
crypto_int64 f6g5_19 = f6 * (crypto_int64) g5_19; int64_t f6g5_19 = f6 * (int64_t) g5_19;
crypto_int64 f6g6_19 = f6 * (crypto_int64) g6_19; int64_t f6g6_19 = f6 * (int64_t) g6_19;
crypto_int64 f6g7_19 = f6 * (crypto_int64) g7_19; int64_t f6g7_19 = f6 * (int64_t) g7_19;
crypto_int64 f6g8_19 = f6 * (crypto_int64) g8_19; int64_t f6g8_19 = f6 * (int64_t) g8_19;
crypto_int64 f6g9_19 = f6 * (crypto_int64) g9_19; int64_t f6g9_19 = f6 * (int64_t) g9_19;
crypto_int64 f7g0 = f7 * (crypto_int64) g0; int64_t f7g0 = f7 * (int64_t) g0;
crypto_int64 f7g1_2 = f7_2 * (crypto_int64) g1; int64_t f7g1_2 = f7_2 * (int64_t) g1;
crypto_int64 f7g2 = f7 * (crypto_int64) g2; int64_t f7g2 = f7 * (int64_t) g2;
crypto_int64 f7g3_38 = f7_2 * (crypto_int64) g3_19; int64_t f7g3_38 = f7_2 * (int64_t) g3_19;
crypto_int64 f7g4_19 = f7 * (crypto_int64) g4_19; int64_t f7g4_19 = f7 * (int64_t) g4_19;
crypto_int64 f7g5_38 = f7_2 * (crypto_int64) g5_19; int64_t f7g5_38 = f7_2 * (int64_t) g5_19;
crypto_int64 f7g6_19 = f7 * (crypto_int64) g6_19; int64_t f7g6_19 = f7 * (int64_t) g6_19;
crypto_int64 f7g7_38 = f7_2 * (crypto_int64) g7_19; int64_t f7g7_38 = f7_2 * (int64_t) g7_19;
crypto_int64 f7g8_19 = f7 * (crypto_int64) g8_19; int64_t f7g8_19 = f7 * (int64_t) g8_19;
crypto_int64 f7g9_38 = f7_2 * (crypto_int64) g9_19; int64_t f7g9_38 = f7_2 * (int64_t) g9_19;
crypto_int64 f8g0 = f8 * (crypto_int64) g0; int64_t f8g0 = f8 * (int64_t) g0;
crypto_int64 f8g1 = f8 * (crypto_int64) g1; int64_t f8g1 = f8 * (int64_t) g1;
crypto_int64 f8g2_19 = f8 * (crypto_int64) g2_19; int64_t f8g2_19 = f8 * (int64_t) g2_19;
crypto_int64 f8g3_19 = f8 * (crypto_int64) g3_19; int64_t f8g3_19 = f8 * (int64_t) g3_19;
crypto_int64 f8g4_19 = f8 * (crypto_int64) g4_19; int64_t f8g4_19 = f8 * (int64_t) g4_19;
crypto_int64 f8g5_19 = f8 * (crypto_int64) g5_19; int64_t f8g5_19 = f8 * (int64_t) g5_19;
crypto_int64 f8g6_19 = f8 * (crypto_int64) g6_19; int64_t f8g6_19 = f8 * (int64_t) g6_19;
crypto_int64 f8g7_19 = f8 * (crypto_int64) g7_19; int64_t f8g7_19 = f8 * (int64_t) g7_19;
crypto_int64 f8g8_19 = f8 * (crypto_int64) g8_19; int64_t f8g8_19 = f8 * (int64_t) g8_19;
crypto_int64 f8g9_19 = f8 * (crypto_int64) g9_19; int64_t f8g9_19 = f8 * (int64_t) g9_19;
crypto_int64 f9g0 = f9 * (crypto_int64) g0; int64_t f9g0 = f9 * (int64_t) g0;
crypto_int64 f9g1_38 = f9_2 * (crypto_int64) g1_19; int64_t f9g1_38 = f9_2 * (int64_t) g1_19;
crypto_int64 f9g2_19 = f9 * (crypto_int64) g2_19; int64_t f9g2_19 = f9 * (int64_t) g2_19;
crypto_int64 f9g3_38 = f9_2 * (crypto_int64) g3_19; int64_t f9g3_38 = f9_2 * (int64_t) g3_19;
crypto_int64 f9g4_19 = f9 * (crypto_int64) g4_19; int64_t f9g4_19 = f9 * (int64_t) g4_19;
crypto_int64 f9g5_38 = f9_2 * (crypto_int64) g5_19; int64_t f9g5_38 = f9_2 * (int64_t) g5_19;
crypto_int64 f9g6_19 = f9 * (crypto_int64) g6_19; int64_t f9g6_19 = f9 * (int64_t) g6_19;
crypto_int64 f9g7_38 = f9_2 * (crypto_int64) g7_19; int64_t f9g7_38 = f9_2 * (int64_t) g7_19;
crypto_int64 f9g8_19 = f9 * (crypto_int64) g8_19; int64_t f9g8_19 = f9 * (int64_t) g8_19;
crypto_int64 f9g9_38 = f9_2 * (crypto_int64) g9_19; int64_t f9g9_38 = f9_2 * (int64_t) g9_19;
crypto_int64 h0 = f0g0+f1g9_38+f2g8_19+f3g7_38+f4g6_19+f5g5_38+f6g4_19+f7g3_38+f8g2_19+f9g1_38; int64_t h0 = f0g0+f1g9_38+f2g8_19+f3g7_38+f4g6_19+f5g5_38+f6g4_19+f7g3_38+f8g2_19+f9g1_38;
crypto_int64 h1 = f0g1+f1g0 +f2g9_19+f3g8_19+f4g7_19+f5g6_19+f6g5_19+f7g4_19+f8g3_19+f9g2_19; int64_t h1 = f0g1+f1g0 +f2g9_19+f3g8_19+f4g7_19+f5g6_19+f6g5_19+f7g4_19+f8g3_19+f9g2_19;
crypto_int64 h2 = f0g2+f1g1_2 +f2g0 +f3g9_38+f4g8_19+f5g7_38+f6g6_19+f7g5_38+f8g4_19+f9g3_38; int64_t h2 = f0g2+f1g1_2 +f2g0 +f3g9_38+f4g8_19+f5g7_38+f6g6_19+f7g5_38+f8g4_19+f9g3_38;
crypto_int64 h3 = f0g3+f1g2 +f2g1 +f3g0 +f4g9_19+f5g8_19+f6g7_19+f7g6_19+f8g5_19+f9g4_19; int64_t h3 = f0g3+f1g2 +f2g1 +f3g0 +f4g9_19+f5g8_19+f6g7_19+f7g6_19+f8g5_19+f9g4_19;
crypto_int64 h4 = f0g4+f1g3_2 +f2g2 +f3g1_2 +f4g0 +f5g9_38+f6g8_19+f7g7_38+f8g6_19+f9g5_38; int64_t h4 = f0g4+f1g3_2 +f2g2 +f3g1_2 +f4g0 +f5g9_38+f6g8_19+f7g7_38+f8g6_19+f9g5_38;
crypto_int64 h5 = f0g5+f1g4 +f2g3 +f3g2 +f4g1 +f5g0 +f6g9_19+f7g8_19+f8g7_19+f9g6_19; int64_t h5 = f0g5+f1g4 +f2g3 +f3g2 +f4g1 +f5g0 +f6g9_19+f7g8_19+f8g7_19+f9g6_19;
crypto_int64 h6 = f0g6+f1g5_2 +f2g4 +f3g3_2 +f4g2 +f5g1_2 +f6g0 +f7g9_38+f8g8_19+f9g7_38; int64_t h6 = f0g6+f1g5_2 +f2g4 +f3g3_2 +f4g2 +f5g1_2 +f6g0 +f7g9_38+f8g8_19+f9g7_38;
crypto_int64 h7 = f0g7+f1g6 +f2g5 +f3g4 +f4g3 +f5g2 +f6g1 +f7g0 +f8g9_19+f9g8_19; int64_t h7 = f0g7+f1g6 +f2g5 +f3g4 +f4g3 +f5g2 +f6g1 +f7g0 +f8g9_19+f9g8_19;
crypto_int64 h8 = f0g8+f1g7_2 +f2g6 +f3g5_2 +f4g4 +f5g3_2 +f6g2 +f7g1_2 +f8g0 +f9g9_38; int64_t h8 = f0g8+f1g7_2 +f2g6 +f3g5_2 +f4g4 +f5g3_2 +f6g2 +f7g1_2 +f8g0 +f9g9_38;
crypto_int64 h9 = f0g9+f1g8 +f2g7 +f3g6 +f4g5 +f5g4 +f6g3 +f7g2 +f8g1 +f9g0 ; int64_t h9 = f0g9+f1g8 +f2g7 +f3g6 +f4g5 +f5g4 +f6g3 +f7g2 +f8g1 +f9g0 ;
crypto_int64 carry0; int64_t carry0;
crypto_int64 carry1; int64_t carry1;
crypto_int64 carry2; int64_t carry2;
crypto_int64 carry3; int64_t carry3;
crypto_int64 carry4; int64_t carry4;
crypto_int64 carry5; int64_t carry5;
crypto_int64 carry6; int64_t carry6;
crypto_int64 carry7; int64_t carry7;
crypto_int64 carry8; int64_t carry8;
crypto_int64 carry9; int64_t carry9;
/* /*
|h0| <= (1.65*1.65*2^52*(1+19+19+19+19)+1.65*1.65*2^50*(38+38+38+38+38)) |h0| <= (1.65*1.65*2^52*(1+19+19+19+19)+1.65*1.65*2^50*(38+38+38+38+38))
@@ -197,57 +198,57 @@ void fe_mul(fe h,const fe f,const fe g)
i.e. |h1| <= 1.7*2^59; narrower ranges for h3, h5, h7, h9 i.e. |h1| <= 1.7*2^59; narrower ranges for h3, h5, h7, h9
*/ */
carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry0 = (h0 + (int64_t) (1L << 25)) >> 26; h1 += carry0; h0 -= carry0 * ((uint64_t) 1L << 26);
carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry4 = (h4 + (int64_t) (1L << 25)) >> 26; h5 += carry4; h4 -= carry4 * ((uint64_t) 1L << 26);
/* |h0| <= 2^25 */ /* |h0| <= 2^25 */
/* |h4| <= 2^25 */ /* |h4| <= 2^25 */
/* |h1| <= 1.71*2^59 */ /* |h1| <= 1.71*2^59 */
/* |h5| <= 1.71*2^59 */ /* |h5| <= 1.71*2^59 */
carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; carry1 = (h1 + (int64_t) (1L << 24)) >> 25; h2 += carry1; h1 -= carry1 * ((uint64_t) 1L << 25);
carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; carry5 = (h5 + (int64_t) (1L << 24)) >> 25; h6 += carry5; h5 -= carry5 * ((uint64_t) 1L << 25);
/* |h1| <= 2^24; from now on fits into int32 */ /* |h1| <= 2^24; from now on fits into int32 */
/* |h5| <= 2^24; from now on fits into int32 */ /* |h5| <= 2^24; from now on fits into int32 */
/* |h2| <= 1.41*2^60 */ /* |h2| <= 1.41*2^60 */
/* |h6| <= 1.41*2^60 */ /* |h6| <= 1.41*2^60 */
carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; carry2 = (h2 + (int64_t) (1L << 25)) >> 26; h3 += carry2; h2 -= carry2 * ((uint64_t) 1L << 26);
carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; carry6 = (h6 + (int64_t) (1L << 25)) >> 26; h7 += carry6; h6 -= carry6 * ((uint64_t) 1L << 26);
/* |h2| <= 2^25; from now on fits into int32 unchanged */ /* |h2| <= 2^25; from now on fits into int32 unchanged */
/* |h6| <= 2^25; from now on fits into int32 unchanged */ /* |h6| <= 2^25; from now on fits into int32 unchanged */
/* |h3| <= 1.71*2^59 */ /* |h3| <= 1.71*2^59 */
/* |h7| <= 1.71*2^59 */ /* |h7| <= 1.71*2^59 */
carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; carry3 = (h3 + (int64_t) (1L << 24)) >> 25; h4 += carry3; h3 -= carry3 * ((uint64_t) 1L << 25);
carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; carry7 = (h7 + (int64_t) (1L << 24)) >> 25; h8 += carry7; h7 -= carry7 * ((uint64_t) 1L << 25);
/* |h3| <= 2^24; from now on fits into int32 unchanged */ /* |h3| <= 2^24; from now on fits into int32 unchanged */
/* |h7| <= 2^24; from now on fits into int32 unchanged */ /* |h7| <= 2^24; from now on fits into int32 unchanged */
/* |h4| <= 1.72*2^34 */ /* |h4| <= 1.72*2^34 */
/* |h8| <= 1.41*2^60 */ /* |h8| <= 1.41*2^60 */
carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry4 = (h4 + (int64_t) (1L << 25)) >> 26; h5 += carry4; h4 -= carry4 * ((uint64_t) 1L << 26);
carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; carry8 = (h8 + (int64_t) (1L << 25)) >> 26; h9 += carry8; h8 -= carry8 * ((uint64_t) 1L << 26);
/* |h4| <= 2^25; from now on fits into int32 unchanged */ /* |h4| <= 2^25; from now on fits into int32 unchanged */
/* |h8| <= 2^25; from now on fits into int32 unchanged */ /* |h8| <= 2^25; from now on fits into int32 unchanged */
/* |h5| <= 1.01*2^24 */ /* |h5| <= 1.01*2^24 */
/* |h9| <= 1.71*2^59 */ /* |h9| <= 1.71*2^59 */
carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; carry9 = (h9 + (int64_t) (1L << 24)) >> 25; h0 += carry9 * 19; h9 -= carry9 * ((uint64_t) 1L << 25);
/* |h9| <= 2^24; from now on fits into int32 unchanged */ /* |h9| <= 2^24; from now on fits into int32 unchanged */
/* |h0| <= 1.1*2^39 */ /* |h0| <= 1.1*2^39 */
carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry0 = (h0 + (int64_t) (1L << 25)) >> 26; h1 += carry0; h0 -= carry0 * ((uint64_t) 1L << 26);
/* |h0| <= 2^25; from now on fits into int32 unchanged */ /* |h0| <= 2^25; from now on fits into int32 unchanged */
/* |h1| <= 1.01*2^24 */ /* |h1| <= 1.01*2^24 */
h[0] = h0; h[0] = (int32_t) h0;
h[1] = h1; h[1] = (int32_t) h1;
h[2] = h2; h[2] = (int32_t) h2;
h[3] = h3; h[3] = (int32_t) h3;
h[4] = h4; h[4] = (int32_t) h4;
h[5] = h5; h[5] = (int32_t) h5;
h[6] = h6; h[6] = (int32_t) h6;
h[7] = h7; h[7] = (int32_t) h7;
h[8] = h8; h[8] = (int32_t) h8;
h[9] = h9; h[9] = (int32_t) h9;
} }
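
Every carry line in fe_mul and in the squaring functions that follow uses the same rounding trick: adding 2^25 before the shift by 26 (or 2^24 before a shift by 25) rounds the carry to the nearest multiple, so the value left behind satisfies |h| <= 2^25, which is what the bounds in the comments track. A standalone check with an arbitrary value, not code from the patch (like the original, it relies on arithmetic right shift of negative numbers):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int64_t h0 = -123456789;   /* arbitrary limb value */
        int64_t h1 = 0;
        int64_t carry0 = (h0 + (int64_t) (1L << 25)) >> 26;
        h1 += carry0;
        h0 -= carry0 * ((uint64_t) 1L << 26);
        /* now |h0| <= 2^25 and h1*2^26 + h0 reconstructs the original value */
        printf("carry0=%lld h0=%lld check=%lld\n", (long long) carry0,
               (long long) h0, (long long) (h1 * ((int64_t) 1L << 26) + h0));
        return 0;
    }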


@@ -1,5 +1,5 @@
#include "fe.h" #include "fe.h"
#include "crypto_int64.h" #include <stdint.h>
/* /*
h = f * f h = f * f
@@ -18,132 +18,132 @@ See fe_mul.c for discussion of implementation strategy.
void fe_sq(fe h,const fe f) void fe_sq(fe h,const fe f)
{ {
crypto_int32 f0 = f[0]; int32_t f0 = f[0];
crypto_int32 f1 = f[1]; int32_t f1 = f[1];
crypto_int32 f2 = f[2]; int32_t f2 = f[2];
crypto_int32 f3 = f[3]; int32_t f3 = f[3];
crypto_int32 f4 = f[4]; int32_t f4 = f[4];
crypto_int32 f5 = f[5]; int32_t f5 = f[5];
crypto_int32 f6 = f[6]; int32_t f6 = f[6];
crypto_int32 f7 = f[7]; int32_t f7 = f[7];
crypto_int32 f8 = f[8]; int32_t f8 = f[8];
crypto_int32 f9 = f[9]; int32_t f9 = f[9];
crypto_int32 f0_2 = 2 * f0; int32_t f0_2 = 2 * f0;
crypto_int32 f1_2 = 2 * f1; int32_t f1_2 = 2 * f1;
crypto_int32 f2_2 = 2 * f2; int32_t f2_2 = 2 * f2;
crypto_int32 f3_2 = 2 * f3; int32_t f3_2 = 2 * f3;
crypto_int32 f4_2 = 2 * f4; int32_t f4_2 = 2 * f4;
crypto_int32 f5_2 = 2 * f5; int32_t f5_2 = 2 * f5;
crypto_int32 f6_2 = 2 * f6; int32_t f6_2 = 2 * f6;
crypto_int32 f7_2 = 2 * f7; int32_t f7_2 = 2 * f7;
crypto_int32 f5_38 = 38 * f5; /* 1.959375*2^30 */ int32_t f5_38 = 38 * f5; /* 1.959375*2^30 */
crypto_int32 f6_19 = 19 * f6; /* 1.959375*2^30 */ int32_t f6_19 = 19 * f6; /* 1.959375*2^30 */
crypto_int32 f7_38 = 38 * f7; /* 1.959375*2^30 */ int32_t f7_38 = 38 * f7; /* 1.959375*2^30 */
crypto_int32 f8_19 = 19 * f8; /* 1.959375*2^30 */ int32_t f8_19 = 19 * f8; /* 1.959375*2^30 */
crypto_int32 f9_38 = 38 * f9; /* 1.959375*2^30 */ int32_t f9_38 = 38 * f9; /* 1.959375*2^30 */
crypto_int64 f0f0 = f0 * (crypto_int64) f0; int64_t f0f0 = f0 * (int64_t) f0;
crypto_int64 f0f1_2 = f0_2 * (crypto_int64) f1; int64_t f0f1_2 = f0_2 * (int64_t) f1;
crypto_int64 f0f2_2 = f0_2 * (crypto_int64) f2; int64_t f0f2_2 = f0_2 * (int64_t) f2;
crypto_int64 f0f3_2 = f0_2 * (crypto_int64) f3; int64_t f0f3_2 = f0_2 * (int64_t) f3;
crypto_int64 f0f4_2 = f0_2 * (crypto_int64) f4; int64_t f0f4_2 = f0_2 * (int64_t) f4;
crypto_int64 f0f5_2 = f0_2 * (crypto_int64) f5; int64_t f0f5_2 = f0_2 * (int64_t) f5;
crypto_int64 f0f6_2 = f0_2 * (crypto_int64) f6; int64_t f0f6_2 = f0_2 * (int64_t) f6;
crypto_int64 f0f7_2 = f0_2 * (crypto_int64) f7; int64_t f0f7_2 = f0_2 * (int64_t) f7;
crypto_int64 f0f8_2 = f0_2 * (crypto_int64) f8; int64_t f0f8_2 = f0_2 * (int64_t) f8;
crypto_int64 f0f9_2 = f0_2 * (crypto_int64) f9; int64_t f0f9_2 = f0_2 * (int64_t) f9;
crypto_int64 f1f1_2 = f1_2 * (crypto_int64) f1; int64_t f1f1_2 = f1_2 * (int64_t) f1;
crypto_int64 f1f2_2 = f1_2 * (crypto_int64) f2; int64_t f1f2_2 = f1_2 * (int64_t) f2;
crypto_int64 f1f3_4 = f1_2 * (crypto_int64) f3_2; int64_t f1f3_4 = f1_2 * (int64_t) f3_2;
crypto_int64 f1f4_2 = f1_2 * (crypto_int64) f4; int64_t f1f4_2 = f1_2 * (int64_t) f4;
crypto_int64 f1f5_4 = f1_2 * (crypto_int64) f5_2; int64_t f1f5_4 = f1_2 * (int64_t) f5_2;
crypto_int64 f1f6_2 = f1_2 * (crypto_int64) f6; int64_t f1f6_2 = f1_2 * (int64_t) f6;
crypto_int64 f1f7_4 = f1_2 * (crypto_int64) f7_2; int64_t f1f7_4 = f1_2 * (int64_t) f7_2;
crypto_int64 f1f8_2 = f1_2 * (crypto_int64) f8; int64_t f1f8_2 = f1_2 * (int64_t) f8;
crypto_int64 f1f9_76 = f1_2 * (crypto_int64) f9_38; int64_t f1f9_76 = f1_2 * (int64_t) f9_38;
crypto_int64 f2f2 = f2 * (crypto_int64) f2; int64_t f2f2 = f2 * (int64_t) f2;
crypto_int64 f2f3_2 = f2_2 * (crypto_int64) f3; int64_t f2f3_2 = f2_2 * (int64_t) f3;
crypto_int64 f2f4_2 = f2_2 * (crypto_int64) f4; int64_t f2f4_2 = f2_2 * (int64_t) f4;
crypto_int64 f2f5_2 = f2_2 * (crypto_int64) f5; int64_t f2f5_2 = f2_2 * (int64_t) f5;
crypto_int64 f2f6_2 = f2_2 * (crypto_int64) f6; int64_t f2f6_2 = f2_2 * (int64_t) f6;
crypto_int64 f2f7_2 = f2_2 * (crypto_int64) f7; int64_t f2f7_2 = f2_2 * (int64_t) f7;
crypto_int64 f2f8_38 = f2_2 * (crypto_int64) f8_19; int64_t f2f8_38 = f2_2 * (int64_t) f8_19;
crypto_int64 f2f9_38 = f2 * (crypto_int64) f9_38; int64_t f2f9_38 = f2 * (int64_t) f9_38;
crypto_int64 f3f3_2 = f3_2 * (crypto_int64) f3; int64_t f3f3_2 = f3_2 * (int64_t) f3;
crypto_int64 f3f4_2 = f3_2 * (crypto_int64) f4; int64_t f3f4_2 = f3_2 * (int64_t) f4;
crypto_int64 f3f5_4 = f3_2 * (crypto_int64) f5_2; int64_t f3f5_4 = f3_2 * (int64_t) f5_2;
crypto_int64 f3f6_2 = f3_2 * (crypto_int64) f6; int64_t f3f6_2 = f3_2 * (int64_t) f6;
crypto_int64 f3f7_76 = f3_2 * (crypto_int64) f7_38; int64_t f3f7_76 = f3_2 * (int64_t) f7_38;
crypto_int64 f3f8_38 = f3_2 * (crypto_int64) f8_19; int64_t f3f8_38 = f3_2 * (int64_t) f8_19;
crypto_int64 f3f9_76 = f3_2 * (crypto_int64) f9_38; int64_t f3f9_76 = f3_2 * (int64_t) f9_38;
crypto_int64 f4f4 = f4 * (crypto_int64) f4; int64_t f4f4 = f4 * (int64_t) f4;
crypto_int64 f4f5_2 = f4_2 * (crypto_int64) f5; int64_t f4f5_2 = f4_2 * (int64_t) f5;
crypto_int64 f4f6_38 = f4_2 * (crypto_int64) f6_19; int64_t f4f6_38 = f4_2 * (int64_t) f6_19;
crypto_int64 f4f7_38 = f4 * (crypto_int64) f7_38; int64_t f4f7_38 = f4 * (int64_t) f7_38;
crypto_int64 f4f8_38 = f4_2 * (crypto_int64) f8_19; int64_t f4f8_38 = f4_2 * (int64_t) f8_19;
crypto_int64 f4f9_38 = f4 * (crypto_int64) f9_38; int64_t f4f9_38 = f4 * (int64_t) f9_38;
crypto_int64 f5f5_38 = f5 * (crypto_int64) f5_38; int64_t f5f5_38 = f5 * (int64_t) f5_38;
crypto_int64 f5f6_38 = f5_2 * (crypto_int64) f6_19; int64_t f5f6_38 = f5_2 * (int64_t) f6_19;
crypto_int64 f5f7_76 = f5_2 * (crypto_int64) f7_38; int64_t f5f7_76 = f5_2 * (int64_t) f7_38;
crypto_int64 f5f8_38 = f5_2 * (crypto_int64) f8_19; int64_t f5f8_38 = f5_2 * (int64_t) f8_19;
crypto_int64 f5f9_76 = f5_2 * (crypto_int64) f9_38; int64_t f5f9_76 = f5_2 * (int64_t) f9_38;
crypto_int64 f6f6_19 = f6 * (crypto_int64) f6_19; int64_t f6f6_19 = f6 * (int64_t) f6_19;
crypto_int64 f6f7_38 = f6 * (crypto_int64) f7_38; int64_t f6f7_38 = f6 * (int64_t) f7_38;
crypto_int64 f6f8_38 = f6_2 * (crypto_int64) f8_19; int64_t f6f8_38 = f6_2 * (int64_t) f8_19;
crypto_int64 f6f9_38 = f6 * (crypto_int64) f9_38; int64_t f6f9_38 = f6 * (int64_t) f9_38;
crypto_int64 f7f7_38 = f7 * (crypto_int64) f7_38; int64_t f7f7_38 = f7 * (int64_t) f7_38;
crypto_int64 f7f8_38 = f7_2 * (crypto_int64) f8_19; int64_t f7f8_38 = f7_2 * (int64_t) f8_19;
crypto_int64 f7f9_76 = f7_2 * (crypto_int64) f9_38; int64_t f7f9_76 = f7_2 * (int64_t) f9_38;
crypto_int64 f8f8_19 = f8 * (crypto_int64) f8_19; int64_t f8f8_19 = f8 * (int64_t) f8_19;
crypto_int64 f8f9_38 = f8 * (crypto_int64) f9_38; int64_t f8f9_38 = f8 * (int64_t) f9_38;
crypto_int64 f9f9_38 = f9 * (crypto_int64) f9_38; int64_t f9f9_38 = f9 * (int64_t) f9_38;
crypto_int64 h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38; int64_t h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38;
crypto_int64 h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38; int64_t h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38;
crypto_int64 h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19; int64_t h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19;
crypto_int64 h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38; int64_t h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38;
crypto_int64 h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38; int64_t h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38;
crypto_int64 h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38; int64_t h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38;
crypto_int64 h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19; int64_t h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19;
crypto_int64 h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38; int64_t h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38;
crypto_int64 h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38; int64_t h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38;
crypto_int64 h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2; int64_t h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2;
crypto_int64 carry0; int64_t carry0;
crypto_int64 carry1; int64_t carry1;
crypto_int64 carry2; int64_t carry2;
crypto_int64 carry3; int64_t carry3;
crypto_int64 carry4; int64_t carry4;
crypto_int64 carry5; int64_t carry5;
crypto_int64 carry6; int64_t carry6;
crypto_int64 carry7; int64_t carry7;
crypto_int64 carry8; int64_t carry8;
crypto_int64 carry9; int64_t carry9;
carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry0 = (h0 + (int64_t) (1L << 25)) >> 26; h1 += carry0; h0 -= carry0 * ((uint64_t) 1L << 26);
carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry4 = (h4 + (int64_t) (1L << 25)) >> 26; h5 += carry4; h4 -= carry4 * ((uint64_t) 1L << 26);
carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; carry1 = (h1 + (int64_t) (1L << 24)) >> 25; h2 += carry1; h1 -= carry1 * ((uint64_t) 1L << 25);
carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; carry5 = (h5 + (int64_t) (1L << 24)) >> 25; h6 += carry5; h5 -= carry5 * ((uint64_t) 1L << 25);
carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; carry2 = (h2 + (int64_t) (1L << 25)) >> 26; h3 += carry2; h2 -= carry2 * ((uint64_t) 1L << 26);
carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; carry6 = (h6 + (int64_t) (1L << 25)) >> 26; h7 += carry6; h6 -= carry6 * ((uint64_t) 1L << 26);
carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; carry3 = (h3 + (int64_t) (1L << 24)) >> 25; h4 += carry3; h3 -= carry3 * ((uint64_t) 1L << 25);
carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; carry7 = (h7 + (int64_t) (1L << 24)) >> 25; h8 += carry7; h7 -= carry7 * ((uint64_t) 1L << 25);
carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry4 = (h4 + (int64_t) (1L << 25)) >> 26; h5 += carry4; h4 -= carry4 * ((uint64_t) 1L << 26);
carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; carry8 = (h8 + (int64_t) (1L << 25)) >> 26; h9 += carry8; h8 -= carry8 * ((uint64_t) 1L << 26);
carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; carry9 = (h9 + (int64_t) (1L << 24)) >> 25; h0 += carry9 * 19; h9 -= carry9 * ((uint64_t) 1L << 25);
carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry0 = (h0 + (int64_t) (1L << 25)) >> 26; h1 += carry0; h0 -= carry0 * ((uint64_t) 1L << 26);
h[0] = h0; h[0] = (int32_t) h0;
h[1] = h1; h[1] = (int32_t) h1;
h[2] = h2; h[2] = (int32_t) h2;
h[3] = h3; h[3] = (int32_t) h3;
h[4] = h4; h[4] = (int32_t) h4;
h[5] = h5; h[5] = (int32_t) h5;
h[6] = h6; h[6] = (int32_t) h6;
h[7] = h7; h[7] = (int32_t) h7;
h[8] = h8; h[8] = (int32_t) h8;
h[9] = h9; h[9] = (int32_t) h9;
} }


@@ -1,5 +1,5 @@
#include "fe.h" #include "fe.h"
#include "crypto_int64.h" #include <stdint.h>
/* /*
h = 2 * f * f h = 2 * f * f
@@ -18,104 +18,104 @@ See fe_mul.c for discussion of implementation strategy.
void fe_sq2(fe h,const fe f) void fe_sq2(fe h,const fe f)
{ {
crypto_int32 f0 = f[0]; int32_t f0 = f[0];
crypto_int32 f1 = f[1]; int32_t f1 = f[1];
crypto_int32 f2 = f[2]; int32_t f2 = f[2];
crypto_int32 f3 = f[3]; int32_t f3 = f[3];
crypto_int32 f4 = f[4]; int32_t f4 = f[4];
crypto_int32 f5 = f[5]; int32_t f5 = f[5];
crypto_int32 f6 = f[6]; int32_t f6 = f[6];
crypto_int32 f7 = f[7]; int32_t f7 = f[7];
crypto_int32 f8 = f[8]; int32_t f8 = f[8];
crypto_int32 f9 = f[9]; int32_t f9 = f[9];
crypto_int32 f0_2 = 2 * f0; int32_t f0_2 = 2 * f0;
crypto_int32 f1_2 = 2 * f1; int32_t f1_2 = 2 * f1;
crypto_int32 f2_2 = 2 * f2; int32_t f2_2 = 2 * f2;
crypto_int32 f3_2 = 2 * f3; int32_t f3_2 = 2 * f3;
crypto_int32 f4_2 = 2 * f4; int32_t f4_2 = 2 * f4;
crypto_int32 f5_2 = 2 * f5; int32_t f5_2 = 2 * f5;
crypto_int32 f6_2 = 2 * f6; int32_t f6_2 = 2 * f6;
crypto_int32 f7_2 = 2 * f7; int32_t f7_2 = 2 * f7;
crypto_int32 f5_38 = 38 * f5; /* 1.959375*2^30 */ int32_t f5_38 = 38 * f5; /* 1.959375*2^30 */
crypto_int32 f6_19 = 19 * f6; /* 1.959375*2^30 */ int32_t f6_19 = 19 * f6; /* 1.959375*2^30 */
crypto_int32 f7_38 = 38 * f7; /* 1.959375*2^30 */ int32_t f7_38 = 38 * f7; /* 1.959375*2^30 */
crypto_int32 f8_19 = 19 * f8; /* 1.959375*2^30 */ int32_t f8_19 = 19 * f8; /* 1.959375*2^30 */
crypto_int32 f9_38 = 38 * f9; /* 1.959375*2^30 */ int32_t f9_38 = 38 * f9; /* 1.959375*2^30 */
crypto_int64 f0f0 = f0 * (crypto_int64) f0; int64_t f0f0 = f0 * (int64_t) f0;
crypto_int64 f0f1_2 = f0_2 * (crypto_int64) f1; int64_t f0f1_2 = f0_2 * (int64_t) f1;
crypto_int64 f0f2_2 = f0_2 * (crypto_int64) f2; int64_t f0f2_2 = f0_2 * (int64_t) f2;
crypto_int64 f0f3_2 = f0_2 * (crypto_int64) f3; int64_t f0f3_2 = f0_2 * (int64_t) f3;
crypto_int64 f0f4_2 = f0_2 * (crypto_int64) f4; int64_t f0f4_2 = f0_2 * (int64_t) f4;
crypto_int64 f0f5_2 = f0_2 * (crypto_int64) f5; int64_t f0f5_2 = f0_2 * (int64_t) f5;
crypto_int64 f0f6_2 = f0_2 * (crypto_int64) f6; int64_t f0f6_2 = f0_2 * (int64_t) f6;
crypto_int64 f0f7_2 = f0_2 * (crypto_int64) f7; int64_t f0f7_2 = f0_2 * (int64_t) f7;
crypto_int64 f0f8_2 = f0_2 * (crypto_int64) f8; int64_t f0f8_2 = f0_2 * (int64_t) f8;
crypto_int64 f0f9_2 = f0_2 * (crypto_int64) f9; int64_t f0f9_2 = f0_2 * (int64_t) f9;
crypto_int64 f1f1_2 = f1_2 * (crypto_int64) f1; int64_t f1f1_2 = f1_2 * (int64_t) f1;
crypto_int64 f1f2_2 = f1_2 * (crypto_int64) f2; int64_t f1f2_2 = f1_2 * (int64_t) f2;
crypto_int64 f1f3_4 = f1_2 * (crypto_int64) f3_2; int64_t f1f3_4 = f1_2 * (int64_t) f3_2;
crypto_int64 f1f4_2 = f1_2 * (crypto_int64) f4; int64_t f1f4_2 = f1_2 * (int64_t) f4;
crypto_int64 f1f5_4 = f1_2 * (crypto_int64) f5_2; int64_t f1f5_4 = f1_2 * (int64_t) f5_2;
crypto_int64 f1f6_2 = f1_2 * (crypto_int64) f6; int64_t f1f6_2 = f1_2 * (int64_t) f6;
crypto_int64 f1f7_4 = f1_2 * (crypto_int64) f7_2; int64_t f1f7_4 = f1_2 * (int64_t) f7_2;
crypto_int64 f1f8_2 = f1_2 * (crypto_int64) f8; int64_t f1f8_2 = f1_2 * (int64_t) f8;
crypto_int64 f1f9_76 = f1_2 * (crypto_int64) f9_38; int64_t f1f9_76 = f1_2 * (int64_t) f9_38;
crypto_int64 f2f2 = f2 * (crypto_int64) f2; int64_t f2f2 = f2 * (int64_t) f2;
crypto_int64 f2f3_2 = f2_2 * (crypto_int64) f3; int64_t f2f3_2 = f2_2 * (int64_t) f3;
crypto_int64 f2f4_2 = f2_2 * (crypto_int64) f4; int64_t f2f4_2 = f2_2 * (int64_t) f4;
crypto_int64 f2f5_2 = f2_2 * (crypto_int64) f5; int64_t f2f5_2 = f2_2 * (int64_t) f5;
crypto_int64 f2f6_2 = f2_2 * (crypto_int64) f6; int64_t f2f6_2 = f2_2 * (int64_t) f6;
crypto_int64 f2f7_2 = f2_2 * (crypto_int64) f7; int64_t f2f7_2 = f2_2 * (int64_t) f7;
crypto_int64 f2f8_38 = f2_2 * (crypto_int64) f8_19; int64_t f2f8_38 = f2_2 * (int64_t) f8_19;
crypto_int64 f2f9_38 = f2 * (crypto_int64) f9_38; int64_t f2f9_38 = f2 * (int64_t) f9_38;
crypto_int64 f3f3_2 = f3_2 * (crypto_int64) f3; int64_t f3f3_2 = f3_2 * (int64_t) f3;
crypto_int64 f3f4_2 = f3_2 * (crypto_int64) f4; int64_t f3f4_2 = f3_2 * (int64_t) f4;
crypto_int64 f3f5_4 = f3_2 * (crypto_int64) f5_2; int64_t f3f5_4 = f3_2 * (int64_t) f5_2;
crypto_int64 f3f6_2 = f3_2 * (crypto_int64) f6; int64_t f3f6_2 = f3_2 * (int64_t) f6;
crypto_int64 f3f7_76 = f3_2 * (crypto_int64) f7_38; int64_t f3f7_76 = f3_2 * (int64_t) f7_38;
crypto_int64 f3f8_38 = f3_2 * (crypto_int64) f8_19; int64_t f3f8_38 = f3_2 * (int64_t) f8_19;
crypto_int64 f3f9_76 = f3_2 * (crypto_int64) f9_38; int64_t f3f9_76 = f3_2 * (int64_t) f9_38;
crypto_int64 f4f4 = f4 * (crypto_int64) f4; int64_t f4f4 = f4 * (int64_t) f4;
crypto_int64 f4f5_2 = f4_2 * (crypto_int64) f5; int64_t f4f5_2 = f4_2 * (int64_t) f5;
crypto_int64 f4f6_38 = f4_2 * (crypto_int64) f6_19; int64_t f4f6_38 = f4_2 * (int64_t) f6_19;
crypto_int64 f4f7_38 = f4 * (crypto_int64) f7_38; int64_t f4f7_38 = f4 * (int64_t) f7_38;
crypto_int64 f4f8_38 = f4_2 * (crypto_int64) f8_19; int64_t f4f8_38 = f4_2 * (int64_t) f8_19;
crypto_int64 f4f9_38 = f4 * (crypto_int64) f9_38; int64_t f4f9_38 = f4 * (int64_t) f9_38;
crypto_int64 f5f5_38 = f5 * (crypto_int64) f5_38; int64_t f5f5_38 = f5 * (int64_t) f5_38;
crypto_int64 f5f6_38 = f5_2 * (crypto_int64) f6_19; int64_t f5f6_38 = f5_2 * (int64_t) f6_19;
crypto_int64 f5f7_76 = f5_2 * (crypto_int64) f7_38; int64_t f5f7_76 = f5_2 * (int64_t) f7_38;
crypto_int64 f5f8_38 = f5_2 * (crypto_int64) f8_19; int64_t f5f8_38 = f5_2 * (int64_t) f8_19;
crypto_int64 f5f9_76 = f5_2 * (crypto_int64) f9_38; int64_t f5f9_76 = f5_2 * (int64_t) f9_38;
crypto_int64 f6f6_19 = f6 * (crypto_int64) f6_19; int64_t f6f6_19 = f6 * (int64_t) f6_19;
crypto_int64 f6f7_38 = f6 * (crypto_int64) f7_38; int64_t f6f7_38 = f6 * (int64_t) f7_38;
crypto_int64 f6f8_38 = f6_2 * (crypto_int64) f8_19; int64_t f6f8_38 = f6_2 * (int64_t) f8_19;
crypto_int64 f6f9_38 = f6 * (crypto_int64) f9_38; int64_t f6f9_38 = f6 * (int64_t) f9_38;
crypto_int64 f7f7_38 = f7 * (crypto_int64) f7_38; int64_t f7f7_38 = f7 * (int64_t) f7_38;
crypto_int64 f7f8_38 = f7_2 * (crypto_int64) f8_19; int64_t f7f8_38 = f7_2 * (int64_t) f8_19;
crypto_int64 f7f9_76 = f7_2 * (crypto_int64) f9_38; int64_t f7f9_76 = f7_2 * (int64_t) f9_38;
crypto_int64 f8f8_19 = f8 * (crypto_int64) f8_19; int64_t f8f8_19 = f8 * (int64_t) f8_19;
crypto_int64 f8f9_38 = f8 * (crypto_int64) f9_38; int64_t f8f9_38 = f8 * (int64_t) f9_38;
crypto_int64 f9f9_38 = f9 * (crypto_int64) f9_38; int64_t f9f9_38 = f9 * (int64_t) f9_38;
crypto_int64 h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38; int64_t h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38;
crypto_int64 h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38; int64_t h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38;
crypto_int64 h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19; int64_t h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19;
crypto_int64 h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38; int64_t h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38;
crypto_int64 h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38; int64_t h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38;
crypto_int64 h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38; int64_t h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38;
crypto_int64 h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19; int64_t h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19;
crypto_int64 h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38; int64_t h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38;
crypto_int64 h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38; int64_t h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38;
crypto_int64 h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2; int64_t h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2;
crypto_int64 carry0; int64_t carry0;
crypto_int64 carry1; int64_t carry1;
crypto_int64 carry2; int64_t carry2;
crypto_int64 carry3; int64_t carry3;
crypto_int64 carry4; int64_t carry4;
crypto_int64 carry5; int64_t carry5;
crypto_int64 carry6; int64_t carry6;
crypto_int64 carry7; int64_t carry7;
crypto_int64 carry8; int64_t carry8;
crypto_int64 carry9; int64_t carry9;
h0 += h0; h0 += h0;
h1 += h1; h1 += h1;
@@ -128,33 +128,33 @@ void fe_sq2(fe h,const fe f)
h8 += h8; h8 += h8;
h9 += h9; h9 += h9;
carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry0 = (h0 + (int64_t) (1L << 25)) >> 26; h1 += carry0; h0 -= carry0 * ((uint64_t) 1L << 26);
carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry4 = (h4 + (int64_t) (1L << 25)) >> 26; h5 += carry4; h4 -= carry4 * ((uint64_t) 1L << 26);
carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; carry1 = (h1 + (int64_t) (1L << 24)) >> 25; h2 += carry1; h1 -= carry1 * ((uint64_t) 1L << 25);
carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; carry5 = (h5 + (int64_t) (1L << 24)) >> 25; h6 += carry5; h5 -= carry5 * ((uint64_t) 1L << 25);
carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; carry2 = (h2 + (int64_t) (1L << 25)) >> 26; h3 += carry2; h2 -= carry2 * ((uint64_t) 1L << 26);
carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; carry6 = (h6 + (int64_t) (1L << 25)) >> 26; h7 += carry6; h6 -= carry6 * ((uint64_t) 1L << 26);
carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; carry3 = (h3 + (int64_t) (1L << 24)) >> 25; h4 += carry3; h3 -= carry3 * ((uint64_t) 1L << 25);
carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; carry7 = (h7 + (int64_t) (1L << 24)) >> 25; h8 += carry7; h7 -= carry7 * ((uint64_t) 1L << 25);
carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry4 = (h4 + (int64_t) (1L << 25)) >> 26; h5 += carry4; h4 -= carry4 * ((uint64_t) 1L << 26);
carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; carry8 = (h8 + (int64_t) (1L << 25)) >> 26; h9 += carry8; h8 -= carry8 * ((uint64_t) 1L << 26);
carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; carry9 = (h9 + (int64_t) (1L << 24)) >> 25; h0 += carry9 * 19; h9 -= carry9 * ((uint64_t) 1L << 25);
carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry0 = (h0 + (int64_t) (1L << 25)) >> 26; h1 += carry0; h0 -= carry0 * ((uint64_t) 1L << 26);
h[0] = h0; h[0] = (int32_t) h0;
h[1] = h1; h[1] = (int32_t) h1;
h[2] = h2; h[2] = (int32_t) h2;
h[3] = h3; h[3] = (int32_t) h3;
h[4] = h4; h[4] = (int32_t) h4;
h[5] = h5; h[5] = (int32_t) h5;
h[6] = h6; h[6] = (int32_t) h6;
h[7] = h7; h[7] = (int32_t) h7;
h[8] = h8; h[8] = (int32_t) h8;
h[9] = h9; h[9] = (int32_t) h9;
} }


@@ -1,4 +1,5 @@
#include "fe.h" #include "fe.h"
#include <stdint.h>
/* /*
Preconditions: Preconditions:
@@ -27,29 +28,29 @@ Proof:
void fe_tobytes(unsigned char *s,const fe h) void fe_tobytes(unsigned char *s,const fe h)
{ {
crypto_int32 h0 = h[0]; int32_t h0 = h[0];
crypto_int32 h1 = h[1]; int32_t h1 = h[1];
crypto_int32 h2 = h[2]; int32_t h2 = h[2];
crypto_int32 h3 = h[3]; int32_t h3 = h[3];
crypto_int32 h4 = h[4]; int32_t h4 = h[4];
crypto_int32 h5 = h[5]; int32_t h5 = h[5];
crypto_int32 h6 = h[6]; int32_t h6 = h[6];
crypto_int32 h7 = h[7]; int32_t h7 = h[7];
crypto_int32 h8 = h[8]; int32_t h8 = h[8];
crypto_int32 h9 = h[9]; int32_t h9 = h[9];
crypto_int32 q; int32_t q;
crypto_int32 carry0; int32_t carry0;
crypto_int32 carry1; int32_t carry1;
crypto_int32 carry2; int32_t carry2;
crypto_int32 carry3; int32_t carry3;
crypto_int32 carry4; int32_t carry4;
crypto_int32 carry5; int32_t carry5;
crypto_int32 carry6; int32_t carry6;
crypto_int32 carry7; int32_t carry7;
crypto_int32 carry8; int32_t carry8;
crypto_int32 carry9; int32_t carry9;
q = (19 * h9 + (((crypto_int32) 1) << 24)) >> 25; q = (19 * h9 + ((uint32_t) 1L << 24)) >> 25;
q = (h0 + q) >> 26; q = (h0 + q) >> 26;
q = (h1 + q) >> 25; q = (h1 + q) >> 25;
q = (h2 + q) >> 26; q = (h2 + q) >> 26;
@@ -65,16 +66,16 @@ void fe_tobytes(unsigned char *s,const fe h)
h0 += 19 * q; h0 += 19 * q;
/* Goal: Output h-2^255 q, which is between 0 and 2^255-20. */ /* Goal: Output h-2^255 q, which is between 0 and 2^255-20. */
carry0 = h0 >> 26; h1 += carry0; h0 -= carry0 << 26; carry0 = h0 >> 26; h1 += carry0; h0 -= carry0 * ((uint32_t) 1L << 26);
carry1 = h1 >> 25; h2 += carry1; h1 -= carry1 << 25; carry1 = h1 >> 25; h2 += carry1; h1 -= carry1 * ((uint32_t) 1L << 25);
carry2 = h2 >> 26; h3 += carry2; h2 -= carry2 << 26; carry2 = h2 >> 26; h3 += carry2; h2 -= carry2 * ((uint32_t) 1L << 26);
carry3 = h3 >> 25; h4 += carry3; h3 -= carry3 << 25; carry3 = h3 >> 25; h4 += carry3; h3 -= carry3 * ((uint32_t) 1L << 25);
carry4 = h4 >> 26; h5 += carry4; h4 -= carry4 << 26; carry4 = h4 >> 26; h5 += carry4; h4 -= carry4 * ((uint32_t) 1L << 26);
carry5 = h5 >> 25; h6 += carry5; h5 -= carry5 << 25; carry5 = h5 >> 25; h6 += carry5; h5 -= carry5 * ((uint32_t) 1L << 25);
carry6 = h6 >> 26; h7 += carry6; h6 -= carry6 << 26; carry6 = h6 >> 26; h7 += carry6; h6 -= carry6 * ((uint32_t) 1L << 26);
carry7 = h7 >> 25; h8 += carry7; h7 -= carry7 << 25; carry7 = h7 >> 25; h8 += carry7; h7 -= carry7 * ((uint32_t) 1L << 25);
carry8 = h8 >> 26; h9 += carry8; h8 -= carry8 << 26; carry8 = h8 >> 26; h9 += carry8; h8 -= carry8 * ((uint32_t) 1L << 26);
carry9 = h9 >> 25; h9 -= carry9 << 25; carry9 = h9 >> 25; h9 -= carry9 * ((uint32_t) 1L << 25);
/* h10 = carry9 */ /* h10 = carry9 */
/* /*
@@ -87,32 +88,32 @@ void fe_tobytes(unsigned char *s,const fe h)
s[0] = h0 >> 0; s[0] = h0 >> 0;
s[1] = h0 >> 8; s[1] = h0 >> 8;
s[2] = h0 >> 16; s[2] = h0 >> 16;
s[3] = (h0 >> 24) | (h1 << 2); s[3] = (h0 >> 24) | (h1 * ((uint32_t) 1 << 2));
s[4] = h1 >> 6; s[4] = h1 >> 6;
s[5] = h1 >> 14; s[5] = h1 >> 14;
s[6] = (h1 >> 22) | (h2 << 3); s[6] = (h1 >> 22) | (h2 * ((uint32_t) 1 << 3));
s[7] = h2 >> 5; s[7] = h2 >> 5;
s[8] = h2 >> 13; s[8] = h2 >> 13;
s[9] = (h2 >> 21) | (h3 << 5); s[9] = (h2 >> 21) | (h3 * ((uint32_t) 1 << 5));
s[10] = h3 >> 3; s[10] = h3 >> 3;
s[11] = h3 >> 11; s[11] = h3 >> 11;
s[12] = (h3 >> 19) | (h4 << 6); s[12] = (h3 >> 19) | (h4 * ((uint32_t) 1 << 6));
s[13] = h4 >> 2; s[13] = h4 >> 2;
s[14] = h4 >> 10; s[14] = h4 >> 10;
s[15] = h4 >> 18; s[15] = h4 >> 18;
s[16] = h5 >> 0; s[16] = h5 >> 0;
s[17] = h5 >> 8; s[17] = h5 >> 8;
s[18] = h5 >> 16; s[18] = h5 >> 16;
s[19] = (h5 >> 24) | (h6 << 1); s[19] = (h5 >> 24) | (h6 * ((uint32_t) 1 << 1));
s[20] = h6 >> 7; s[20] = h6 >> 7;
s[21] = h6 >> 15; s[21] = h6 >> 15;
s[22] = (h6 >> 23) | (h7 << 3); s[22] = (h6 >> 23) | (h7 * ((uint32_t) 1 << 3));
s[23] = h7 >> 5; s[23] = h7 >> 5;
s[24] = h7 >> 13; s[24] = h7 >> 13;
s[25] = (h7 >> 21) | (h8 << 4); s[25] = (h7 >> 21) | (h8 * ((uint32_t) 1 << 4));
s[26] = h8 >> 4; s[26] = h8 >> 4;
s[27] = h8 >> 12; s[27] = h8 >> 12;
s[28] = (h8 >> 20) | (h9 << 6); s[28] = (h8 >> 20) | (h9 * ((uint32_t) 1 << 6));
s[29] = h9 >> 2; s[29] = h9 >> 2;
s[30] = h9 >> 10; s[30] = h9 >> 10;
s[31] = h9 >> 18; s[31] = h9 >> 18;
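
The only non-obvious part of fe_tobytes is the q computation before the final carry chain: cascading q through all ten limbs leaves q equal to the quotient of h by 2^255 - 19 (0 or 1 under the stated preconditions), so adding 19*q and then discarding the carry out of h9 (the "h10 = carry9" noted above) reduces h modulo the field prime before the limbs are packed into the 32 output bytes. This is a gloss on the "Goal: Output h-2^255 q" comment, not text from the patch:

    output = h + 19*q - q*2^255      (add 19*q, then drop the carry out of h9)
           = h - q*(2^255 - 19)
           = h reduced modulo 2^255 - 19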


@@ -35,8 +35,7 @@ static void select(ge_precomp *t,int pos,signed char b)
{ {
ge_precomp minust; ge_precomp minust;
unsigned char bnegative = negative(b); unsigned char bnegative = negative(b);
unsigned char babs = b - (((-bnegative) & b) << 1); unsigned char babs = b - (((-bnegative) & b) * ((signed char) 1 << 1));
ge_precomp_0(t); ge_precomp_0(t);
cmov(t,&base[pos][0],equal(babs,1)); cmov(t,&base[pos][0],equal(babs,1));
cmov(t,&base[pos][1],equal(babs,2)); cmov(t,&base[pos][1],equal(babs,2));
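
The one-line change in select() is the same shift-to-multiply rewrite applied to a branchless absolute value: (-bnegative) & b equals b when b is negative and 0 otherwise, so subtracting twice that amount gives |b| without a data-dependent branch. A standalone sketch under that reading, not code from the file (the helper name and the negativity test are illustrative):

    #include <stdio.h>

    /* Branchless |b| for a small signed value, mirroring the rewritten line:
       subtract 2*b when b is negative, subtract 0 otherwise.  The multiply by
       ((signed char) 1 << 1) replaces a left shift of a possibly negative value. */
    static unsigned char babs_of(signed char b)
    {
        unsigned char bnegative = (unsigned char) ((unsigned char) b >> 7); /* assumes 8-bit char */
        return (unsigned char) (b - (((-bnegative) & b) * ((signed char) 1 << 1)));
    }

    int main(void)
    {
        int b;
        for (b = -8; b <= 8; b++)
            printf("%d -> %d\n", b, (int) babs_of((signed char) b));
        return 0;
    }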


@@ -2,6 +2,7 @@
#include "crypto_int64.h" #include "crypto_int64.h"
#include "crypto_uint32.h" #include "crypto_uint32.h"
#include "crypto_uint64.h" #include "crypto_uint64.h"
#include <stdint.h>
static crypto_uint64 load_3(const unsigned char *in) static crypto_uint64 load_3(const unsigned char *in)
{ {
@@ -33,91 +34,92 @@ Output:
where l = 2^252 + 27742317777372353535851937790883648493. where l = 2^252 + 27742317777372353535851937790883648493.
*/ */
void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,const unsigned char *c) void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,const unsigned char *c)
{ {
crypto_int64 a0 = 2097151 & load_3(a); int64_t a0 = 2097151 & load_3(a);
crypto_int64 a1 = 2097151 & (load_4(a + 2) >> 5); int64_t a1 = 2097151 & (load_4(a + 2) >> 5);
crypto_int64 a2 = 2097151 & (load_3(a + 5) >> 2); int64_t a2 = 2097151 & (load_3(a + 5) >> 2);
crypto_int64 a3 = 2097151 & (load_4(a + 7) >> 7); int64_t a3 = 2097151 & (load_4(a + 7) >> 7);
crypto_int64 a4 = 2097151 & (load_4(a + 10) >> 4); int64_t a4 = 2097151 & (load_4(a + 10) >> 4);
crypto_int64 a5 = 2097151 & (load_3(a + 13) >> 1); int64_t a5 = 2097151 & (load_3(a + 13) >> 1);
crypto_int64 a6 = 2097151 & (load_4(a + 15) >> 6); int64_t a6 = 2097151 & (load_4(a + 15) >> 6);
crypto_int64 a7 = 2097151 & (load_3(a + 18) >> 3); int64_t a7 = 2097151 & (load_3(a + 18) >> 3);
crypto_int64 a8 = 2097151 & load_3(a + 21); int64_t a8 = 2097151 & load_3(a + 21);
crypto_int64 a9 = 2097151 & (load_4(a + 23) >> 5); int64_t a9 = 2097151 & (load_4(a + 23) >> 5);
crypto_int64 a10 = 2097151 & (load_3(a + 26) >> 2); int64_t a10 = 2097151 & (load_3(a + 26) >> 2);
crypto_int64 a11 = (load_4(a + 28) >> 7); int64_t a11 = (load_4(a + 28) >> 7);
crypto_int64 b0 = 2097151 & load_3(b); int64_t b0 = 2097151 & load_3(b);
crypto_int64 b1 = 2097151 & (load_4(b + 2) >> 5); int64_t b1 = 2097151 & (load_4(b + 2) >> 5);
crypto_int64 b2 = 2097151 & (load_3(b + 5) >> 2); int64_t b2 = 2097151 & (load_3(b + 5) >> 2);
crypto_int64 b3 = 2097151 & (load_4(b + 7) >> 7); int64_t b3 = 2097151 & (load_4(b + 7) >> 7);
crypto_int64 b4 = 2097151 & (load_4(b + 10) >> 4); int64_t b4 = 2097151 & (load_4(b + 10) >> 4);
crypto_int64 b5 = 2097151 & (load_3(b + 13) >> 1); int64_t b5 = 2097151 & (load_3(b + 13) >> 1);
crypto_int64 b6 = 2097151 & (load_4(b + 15) >> 6); int64_t b6 = 2097151 & (load_4(b + 15) >> 6);
crypto_int64 b7 = 2097151 & (load_3(b + 18) >> 3); int64_t b7 = 2097151 & (load_3(b + 18) >> 3);
crypto_int64 b8 = 2097151 & load_3(b + 21); int64_t b8 = 2097151 & load_3(b + 21);
crypto_int64 b9 = 2097151 & (load_4(b + 23) >> 5); int64_t b9 = 2097151 & (load_4(b + 23) >> 5);
crypto_int64 b10 = 2097151 & (load_3(b + 26) >> 2); int64_t b10 = 2097151 & (load_3(b + 26) >> 2);
crypto_int64 b11 = (load_4(b + 28) >> 7); int64_t b11 = (load_4(b + 28) >> 7);
crypto_int64 c0 = 2097151 & load_3(c); int64_t c0 = 2097151 & load_3(c);
crypto_int64 c1 = 2097151 & (load_4(c + 2) >> 5); int64_t c1 = 2097151 & (load_4(c + 2) >> 5);
crypto_int64 c2 = 2097151 & (load_3(c + 5) >> 2); int64_t c2 = 2097151 & (load_3(c + 5) >> 2);
crypto_int64 c3 = 2097151 & (load_4(c + 7) >> 7); int64_t c3 = 2097151 & (load_4(c + 7) >> 7);
crypto_int64 c4 = 2097151 & (load_4(c + 10) >> 4); int64_t c4 = 2097151 & (load_4(c + 10) >> 4);
crypto_int64 c5 = 2097151 & (load_3(c + 13) >> 1); int64_t c5 = 2097151 & (load_3(c + 13) >> 1);
crypto_int64 c6 = 2097151 & (load_4(c + 15) >> 6); int64_t c6 = 2097151 & (load_4(c + 15) >> 6);
crypto_int64 c7 = 2097151 & (load_3(c + 18) >> 3); int64_t c7 = 2097151 & (load_3(c + 18) >> 3);
crypto_int64 c8 = 2097151 & load_3(c + 21); int64_t c8 = 2097151 & load_3(c + 21);
crypto_int64 c9 = 2097151 & (load_4(c + 23) >> 5); int64_t c9 = 2097151 & (load_4(c + 23) >> 5);
crypto_int64 c10 = 2097151 & (load_3(c + 26) >> 2); int64_t c10 = 2097151 & (load_3(c + 26) >> 2);
crypto_int64 c11 = (load_4(c + 28) >> 7); int64_t c11 = (load_4(c + 28) >> 7);
crypto_int64 s0; int64_t s0;
crypto_int64 s1; int64_t s1;
crypto_int64 s2; int64_t s2;
crypto_int64 s3; int64_t s3;
crypto_int64 s4; int64_t s4;
crypto_int64 s5; int64_t s5;
crypto_int64 s6; int64_t s6;
crypto_int64 s7; int64_t s7;
crypto_int64 s8; int64_t s8;
crypto_int64 s9; int64_t s9;
crypto_int64 s10; int64_t s10;
crypto_int64 s11; int64_t s11;
crypto_int64 s12; int64_t s12;
crypto_int64 s13; int64_t s13;
crypto_int64 s14; int64_t s14;
crypto_int64 s15; int64_t s15;
crypto_int64 s16; int64_t s16;
crypto_int64 s17; int64_t s17;
crypto_int64 s18; int64_t s18;
crypto_int64 s19; int64_t s19;
crypto_int64 s20; int64_t s20;
crypto_int64 s21; int64_t s21;
crypto_int64 s22; int64_t s22;
crypto_int64 s23; int64_t s23;
crypto_int64 carry0; int64_t carry0;
crypto_int64 carry1; int64_t carry1;
crypto_int64 carry2; int64_t carry2;
crypto_int64 carry3; int64_t carry3;
crypto_int64 carry4; int64_t carry4;
crypto_int64 carry5; int64_t carry5;
crypto_int64 carry6; int64_t carry6;
crypto_int64 carry7; int64_t carry7;
crypto_int64 carry8; int64_t carry8;
crypto_int64 carry9; int64_t carry9;
crypto_int64 carry10; int64_t carry10;
crypto_int64 carry11; int64_t carry11;
crypto_int64 carry12; int64_t carry12;
crypto_int64 carry13; int64_t carry13;
crypto_int64 carry14; int64_t carry14;
crypto_int64 carry15; int64_t carry15;
crypto_int64 carry16; int64_t carry16;
crypto_int64 carry17; int64_t carry17;
crypto_int64 carry18; int64_t carry18;
crypto_int64 carry19; int64_t carry19;
crypto_int64 carry20; int64_t carry20;
crypto_int64 carry21; int64_t carry21;
crypto_int64 carry22; int64_t carry22;
s0 = c0 + a0*b0; s0 = c0 + a0*b0;
s1 = c1 + a0*b1 + a1*b0; s1 = c1 + a0*b1 + a1*b0;
@@ -144,30 +146,30 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s22 = a11*b11; s22 = a11*b11;
s23 = 0; s23 = 0;
carry0 = (s0 + (1<<20)) >> 21; s1 += carry0; s0 -= carry0 << 21; carry0 = (s0 + (int64_t) (1L << 20)) >> 21; s1 += carry0; s0 -= carry0 * ((uint64_t) 1L << 21);
carry2 = (s2 + (1<<20)) >> 21; s3 += carry2; s2 -= carry2 << 21; carry2 = (s2 + (int64_t) (1L << 20)) >> 21; s3 += carry2; s2 -= carry2 * ((uint64_t) 1L << 21);
carry4 = (s4 + (1<<20)) >> 21; s5 += carry4; s4 -= carry4 << 21; carry4 = (s4 + (int64_t) (1L << 20)) >> 21; s5 += carry4; s4 -= carry4 * ((uint64_t) 1L << 21);
carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = (s6 + (int64_t) (1L << 20)) >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = (s8 + (int64_t) (1L << 20)) >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = (s10 + (int64_t) (1L << 20)) >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
carry12 = (s12 + (1<<20)) >> 21; s13 += carry12; s12 -= carry12 << 21; carry12 = (s12 + (int64_t) (1L << 20)) >> 21; s13 += carry12; s12 -= carry12 * ((uint64_t) 1L << 21);
carry14 = (s14 + (1<<20)) >> 21; s15 += carry14; s14 -= carry14 << 21; carry14 = (s14 + (int64_t) (1L << 20)) >> 21; s15 += carry14; s14 -= carry14 * ((uint64_t) 1L << 21);
carry16 = (s16 + (1<<20)) >> 21; s17 += carry16; s16 -= carry16 << 21; carry16 = (s16 + (int64_t) (1L << 20)) >> 21; s17 += carry16; s16 -= carry16 * ((uint64_t) 1L << 21);
carry18 = (s18 + (1<<20)) >> 21; s19 += carry18; s18 -= carry18 << 21; carry18 = (s18 + (int64_t) (1L << 20)) >> 21; s19 += carry18; s18 -= carry18 * ((uint64_t) 1L << 21);
carry20 = (s20 + (1<<20)) >> 21; s21 += carry20; s20 -= carry20 << 21; carry20 = (s20 + (int64_t) (1L << 20)) >> 21; s21 += carry20; s20 -= carry20 * ((uint64_t) 1L << 21);
carry22 = (s22 + (1<<20)) >> 21; s23 += carry22; s22 -= carry22 << 21; carry22 = (s22 + (int64_t) (1L << 20)) >> 21; s23 += carry22; s22 -= carry22 * ((uint64_t) 1L << 21);
carry1 = (s1 + (1<<20)) >> 21; s2 += carry1; s1 -= carry1 << 21; carry1 = (s1 + (int64_t) (1L << 20)) >> 21; s2 += carry1; s1 -= carry1 * ((uint64_t) 1L << 21);
carry3 = (s3 + (1<<20)) >> 21; s4 += carry3; s3 -= carry3 << 21; carry3 = (s3 + (int64_t) (1L << 20)) >> 21; s4 += carry3; s3 -= carry3 * ((uint64_t) 1L << 21);
carry5 = (s5 + (1<<20)) >> 21; s6 += carry5; s5 -= carry5 << 21; carry5 = (s5 + (int64_t) (1L << 20)) >> 21; s6 += carry5; s5 -= carry5 * ((uint64_t) 1L << 21);
carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = (s7 + (int64_t) (1L << 20)) >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = (s9 + (int64_t) (1L << 20)) >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; carry11 = (s11 + (int64_t) (1L << 20)) >> 21; s12 += carry11; s11 -= carry11 * ((uint64_t) 1L << 21);
carry13 = (s13 + (1<<20)) >> 21; s14 += carry13; s13 -= carry13 << 21; carry13 = (s13 + (int64_t) (1L << 20)) >> 21; s14 += carry13; s13 -= carry13 * ((uint64_t) 1L << 21);
carry15 = (s15 + (1<<20)) >> 21; s16 += carry15; s15 -= carry15 << 21; carry15 = (s15 + (int64_t) (1L << 20)) >> 21; s16 += carry15; s15 -= carry15 * ((uint64_t) 1L << 21);
carry17 = (s17 + (1<<20)) >> 21; s18 += carry17; s17 -= carry17 << 21; carry17 = (s17 + (int64_t) (1L << 20)) >> 21; s18 += carry17; s17 -= carry17 * ((uint64_t) 1L << 21);
carry19 = (s19 + (1<<20)) >> 21; s20 += carry19; s19 -= carry19 << 21; carry19 = (s19 + (int64_t) (1L << 20)) >> 21; s20 += carry19; s19 -= carry19 * ((uint64_t) 1L << 21);
carry21 = (s21 + (1<<20)) >> 21; s22 += carry21; s21 -= carry21 << 21; carry21 = (s21 + (int64_t) (1L << 20)) >> 21; s22 += carry21; s21 -= carry21 * ((uint64_t) 1L << 21);
s11 += s23 * 666643; s11 += s23 * 666643;
s12 += s23 * 470296; s12 += s23 * 470296;
@@ -175,7 +177,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s14 -= s23 * 997805; s14 -= s23 * 997805;
s15 += s23 * 136657; s15 += s23 * 136657;
s16 -= s23 * 683901; s16 -= s23 * 683901;
s23 = 0;
s10 += s22 * 666643; s10 += s22 * 666643;
s11 += s22 * 470296; s11 += s22 * 470296;
@@ -183,7 +184,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s13 -= s22 * 997805; s13 -= s22 * 997805;
s14 += s22 * 136657; s14 += s22 * 136657;
s15 -= s22 * 683901; s15 -= s22 * 683901;
s22 = 0;
s9 += s21 * 666643; s9 += s21 * 666643;
s10 += s21 * 470296; s10 += s21 * 470296;
@@ -191,7 +191,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s12 -= s21 * 997805; s12 -= s21 * 997805;
s13 += s21 * 136657; s13 += s21 * 136657;
s14 -= s21 * 683901; s14 -= s21 * 683901;
s21 = 0;
s8 += s20 * 666643; s8 += s20 * 666643;
s9 += s20 * 470296; s9 += s20 * 470296;
@@ -199,7 +198,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s11 -= s20 * 997805; s11 -= s20 * 997805;
s12 += s20 * 136657; s12 += s20 * 136657;
s13 -= s20 * 683901; s13 -= s20 * 683901;
s20 = 0;
s7 += s19 * 666643; s7 += s19 * 666643;
s8 += s19 * 470296; s8 += s19 * 470296;
@@ -207,7 +205,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s10 -= s19 * 997805; s10 -= s19 * 997805;
s11 += s19 * 136657; s11 += s19 * 136657;
s12 -= s19 * 683901; s12 -= s19 * 683901;
s19 = 0;
s6 += s18 * 666643; s6 += s18 * 666643;
s7 += s18 * 470296; s7 += s18 * 470296;
@@ -215,20 +212,19 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s9 -= s18 * 997805; s9 -= s18 * 997805;
s10 += s18 * 136657; s10 += s18 * 136657;
s11 -= s18 * 683901; s11 -= s18 * 683901;
s18 = 0;
carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = (s6 + (int64_t) (1L << 20)) >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = (s8 + (int64_t) (1L << 20)) >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = (s10 + (int64_t) (1L << 20)) >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
carry12 = (s12 + (1<<20)) >> 21; s13 += carry12; s12 -= carry12 << 21; carry12 = (s12 + (int64_t) (1L << 20)) >> 21; s13 += carry12; s12 -= carry12 * ((uint64_t) 1L << 21);
carry14 = (s14 + (1<<20)) >> 21; s15 += carry14; s14 -= carry14 << 21; carry14 = (s14 + (int64_t) (1L << 20)) >> 21; s15 += carry14; s14 -= carry14 * ((uint64_t) 1L << 21);
carry16 = (s16 + (1<<20)) >> 21; s17 += carry16; s16 -= carry16 << 21; carry16 = (s16 + (int64_t) (1L << 20)) >> 21; s17 += carry16; s16 -= carry16 * ((uint64_t) 1L << 21);
carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = (s7 + (int64_t) (1L << 20)) >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = (s9 + (int64_t) (1L << 20)) >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; carry11 = (s11 + (int64_t) (1L << 20)) >> 21; s12 += carry11; s11 -= carry11 * ((uint64_t) 1L << 21);
carry13 = (s13 + (1<<20)) >> 21; s14 += carry13; s13 -= carry13 << 21; carry13 = (s13 + (int64_t) (1L << 20)) >> 21; s14 += carry13; s13 -= carry13 * ((uint64_t) 1L << 21);
carry15 = (s15 + (1<<20)) >> 21; s16 += carry15; s15 -= carry15 << 21; carry15 = (s15 + (int64_t) (1L << 20)) >> 21; s16 += carry15; s15 -= carry15 * ((uint64_t) 1L << 21);
s5 += s17 * 666643; s5 += s17 * 666643;
s6 += s17 * 470296; s6 += s17 * 470296;
@ -236,7 +232,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s8 -= s17 * 997805; s8 -= s17 * 997805;
s9 += s17 * 136657; s9 += s17 * 136657;
s10 -= s17 * 683901; s10 -= s17 * 683901;
s17 = 0;
s4 += s16 * 666643; s4 += s16 * 666643;
s5 += s16 * 470296; s5 += s16 * 470296;
@ -244,7 +239,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s7 -= s16 * 997805; s7 -= s16 * 997805;
s8 += s16 * 136657; s8 += s16 * 136657;
s9 -= s16 * 683901; s9 -= s16 * 683901;
s16 = 0;
s3 += s15 * 666643; s3 += s15 * 666643;
s4 += s15 * 470296; s4 += s15 * 470296;
@ -252,7 +246,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s6 -= s15 * 997805; s6 -= s15 * 997805;
s7 += s15 * 136657; s7 += s15 * 136657;
s8 -= s15 * 683901; s8 -= s15 * 683901;
s15 = 0;
s2 += s14 * 666643; s2 += s14 * 666643;
s3 += s14 * 470296; s3 += s14 * 470296;
@ -260,7 +253,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s5 -= s14 * 997805; s5 -= s14 * 997805;
s6 += s14 * 136657; s6 += s14 * 136657;
s7 -= s14 * 683901; s7 -= s14 * 683901;
s14 = 0;
s1 += s13 * 666643; s1 += s13 * 666643;
s2 += s13 * 470296; s2 += s13 * 470296;
@ -268,7 +260,6 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s4 -= s13 * 997805; s4 -= s13 * 997805;
s5 += s13 * 136657; s5 += s13 * 136657;
s6 -= s13 * 683901; s6 -= s13 * 683901;
s13 = 0;
s0 += s12 * 666643; s0 += s12 * 666643;
s1 += s12 * 470296; s1 += s12 * 470296;
@ -278,19 +269,19 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s5 -= s12 * 683901; s5 -= s12 * 683901;
s12 = 0; s12 = 0;
carry0 = (s0 + (1<<20)) >> 21; s1 += carry0; s0 -= carry0 << 21; carry0 = (s0 + (int64_t) (1L << 20)) >> 21; s1 += carry0; s0 -= carry0 * ((uint64_t) 1L << 21);
carry2 = (s2 + (1<<20)) >> 21; s3 += carry2; s2 -= carry2 << 21; carry2 = (s2 + (int64_t) (1L << 20)) >> 21; s3 += carry2; s2 -= carry2 * ((uint64_t) 1L << 21);
carry4 = (s4 + (1<<20)) >> 21; s5 += carry4; s4 -= carry4 << 21; carry4 = (s4 + (int64_t) (1L << 20)) >> 21; s5 += carry4; s4 -= carry4 * ((uint64_t) 1L << 21);
carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = (s6 + (int64_t) (1L << 20)) >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = (s8 + (int64_t) (1L << 20)) >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = (s10 + (int64_t) (1L << 20)) >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
carry1 = (s1 + (1<<20)) >> 21; s2 += carry1; s1 -= carry1 << 21; carry1 = (s1 + (int64_t) (1L << 20)) >> 21; s2 += carry1; s1 -= carry1 * ((uint64_t) 1L << 21);
carry3 = (s3 + (1<<20)) >> 21; s4 += carry3; s3 -= carry3 << 21; carry3 = (s3 + (int64_t) (1L << 20)) >> 21; s4 += carry3; s3 -= carry3 * ((uint64_t) 1L << 21);
carry5 = (s5 + (1<<20)) >> 21; s6 += carry5; s5 -= carry5 << 21; carry5 = (s5 + (int64_t) (1L << 20)) >> 21; s6 += carry5; s5 -= carry5 * ((uint64_t) 1L << 21);
carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = (s7 + (int64_t) (1L << 20)) >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = (s9 + (int64_t) (1L << 20)) >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; carry11 = (s11 + (int64_t) (1L << 20)) >> 21; s12 += carry11; s11 -= carry11 * ((uint64_t) 1L << 21);
s0 += s12 * 666643; s0 += s12 * 666643;
s1 += s12 * 470296; s1 += s12 * 470296;
@ -300,18 +291,18 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s5 -= s12 * 683901; s5 -= s12 * 683901;
s12 = 0; s12 = 0;
carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 * ((uint64_t) 1L << 21);
carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 * ((uint64_t) 1L << 21);
carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 * ((uint64_t) 1L << 21);
carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 * ((uint64_t) 1L << 21);
carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 * ((uint64_t) 1L << 21);
carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 * ((uint64_t) 1L << 21);
carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
carry11 = s11 >> 21; s12 += carry11; s11 -= carry11 << 21; carry11 = s11 >> 21; s12 += carry11; s11 -= carry11 * ((uint64_t) 1L << 21);
s0 += s12 * 666643; s0 += s12 * 666643;
s1 += s12 * 470296; s1 += s12 * 470296;
@ -319,49 +310,48 @@ void sc_muladd(unsigned char *s,const unsigned char *a,const unsigned char *b,co
s3 -= s12 * 997805; s3 -= s12 * 997805;
s4 += s12 * 136657; s4 += s12 * 136657;
s5 -= s12 * 683901; s5 -= s12 * 683901;
s12 = 0;
carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 * ((uint64_t) 1L << 21);
carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 * ((uint64_t) 1L << 21);
carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 * ((uint64_t) 1L << 21);
carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 * ((uint64_t) 1L << 21);
carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 * ((uint64_t) 1L << 21);
carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 * ((uint64_t) 1L << 21);
carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
s[0] = s0 >> 0; s[0] = s0 >> 0;
s[1] = s0 >> 8; s[1] = s0 >> 8;
s[2] = (s0 >> 16) | (s1 << 5); s[2] = (s0 >> 16) | (s1 * ((uint64_t) 1 << 5));
s[3] = s1 >> 3; s[3] = s1 >> 3;
s[4] = s1 >> 11; s[4] = s1 >> 11;
s[5] = (s1 >> 19) | (s2 << 2); s[5] = (s1 >> 19) | (s2 * ((uint64_t) 1 << 2));
s[6] = s2 >> 6; s[6] = s2 >> 6;
s[7] = (s2 >> 14) | (s3 << 7); s[7] = (s2 >> 14) | (s3 * ((uint64_t) 1 << 7));
s[8] = s3 >> 1; s[8] = s3 >> 1;
s[9] = s3 >> 9; s[9] = s3 >> 9;
s[10] = (s3 >> 17) | (s4 << 4); s[10] = (s3 >> 17) | (s4 * ((uint64_t) 1 << 4));
s[11] = s4 >> 4; s[11] = s4 >> 4;
s[12] = s4 >> 12; s[12] = s4 >> 12;
s[13] = (s4 >> 20) | (s5 << 1); s[13] = (s4 >> 20) | (s5 * ((uint64_t) 1 << 1));
s[14] = s5 >> 7; s[14] = s5 >> 7;
s[15] = (s5 >> 15) | (s6 << 6); s[15] = (s5 >> 15) | (s6 * ((uint64_t) 1 << 6));
s[16] = s6 >> 2; s[16] = s6 >> 2;
s[17] = s6 >> 10; s[17] = s6 >> 10;
s[18] = (s6 >> 18) | (s7 << 3); s[18] = (s6 >> 18) | (s7 * ((uint64_t) 1 << 3));
s[19] = s7 >> 5; s[19] = s7 >> 5;
s[20] = s7 >> 13; s[20] = s7 >> 13;
s[21] = s8 >> 0; s[21] = s8 >> 0;
s[22] = s8 >> 8; s[22] = s8 >> 8;
s[23] = (s8 >> 16) | (s9 << 5); s[23] = (s8 >> 16) | (s9 * ((uint64_t) 1 << 5));
s[24] = s9 >> 3; s[24] = s9 >> 3;
s[25] = s9 >> 11; s[25] = s9 >> 11;
s[26] = (s9 >> 19) | (s10 << 2); s[26] = (s9 >> 19) | (s10 * ((uint64_t) 1 << 2));
s[27] = s10 >> 6; s[27] = s10 >> 6;
s[28] = (s10 >> 14) | (s11 << 7); s[28] = (s10 >> 14) | (s11 * ((uint64_t) 1 << 7));
s[29] = s11 >> 1; s[29] = s11 >> 1;
s[30] = s11 >> 9; s[30] = s11 >> 9;
s[31] = s11 >> 17; s[31] = s11 >> 17;
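The change repeated throughout sc_muladd above is mechanical: every occurrence of carry << 21 becomes carry * ((uint64_t) 1L << 21), because left-shifting a negative signed integer is undefined behaviour in C, while the multiplication is well defined and produces the same value for the limb ranges involved. A minimal standalone sketch of one carry step showing the pattern (illustration only, not project code):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* One carry/reduce step in isolation.  The old code computed
       s -= carry << 21; shifting a negative signed value left is
       undefined behaviour, so the replacement multiplies by the same
       power of two instead. */
    int64_t s = -1234567;                                /* arbitrary limb value */
    int64_t carry = (s + ((int64_t) 1L << 20)) >> 21;    /* rounded carry, as in the diff */
    s -= carry * ((uint64_t) 1L << 21);                  /* was: s -= carry << 21 */
    printf("carry=%lld, reduced s=%lld\n", (long long) carry, (long long) s);
    return 0;
}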


@ -1,4 +1,5 @@
#include "sc.h" #include "sc.h"
#include <stdint.h>
#include "crypto_int64.h" #include "crypto_int64.h"
#include "crypto_uint32.h" #include "crypto_uint32.h"
#include "crypto_uint64.h" #include "crypto_uint64.h"
@ -34,47 +35,47 @@ Output:
void sc_reduce(unsigned char *s) void sc_reduce(unsigned char *s)
{ {
crypto_int64 s0 = 2097151 & load_3(s); int64_t s0 = 2097151 & load_3(s);
crypto_int64 s1 = 2097151 & (load_4(s + 2) >> 5); int64_t s1 = 2097151 & (load_4(s + 2) >> 5);
crypto_int64 s2 = 2097151 & (load_3(s + 5) >> 2); int64_t s2 = 2097151 & (load_3(s + 5) >> 2);
crypto_int64 s3 = 2097151 & (load_4(s + 7) >> 7); int64_t s3 = 2097151 & (load_4(s + 7) >> 7);
crypto_int64 s4 = 2097151 & (load_4(s + 10) >> 4); int64_t s4 = 2097151 & (load_4(s + 10) >> 4);
crypto_int64 s5 = 2097151 & (load_3(s + 13) >> 1); int64_t s5 = 2097151 & (load_3(s + 13) >> 1);
crypto_int64 s6 = 2097151 & (load_4(s + 15) >> 6); int64_t s6 = 2097151 & (load_4(s + 15) >> 6);
crypto_int64 s7 = 2097151 & (load_3(s + 18) >> 3); int64_t s7 = 2097151 & (load_3(s + 18) >> 3);
crypto_int64 s8 = 2097151 & load_3(s + 21); int64_t s8 = 2097151 & load_3(s + 21);
crypto_int64 s9 = 2097151 & (load_4(s + 23) >> 5); int64_t s9 = 2097151 & (load_4(s + 23) >> 5);
crypto_int64 s10 = 2097151 & (load_3(s + 26) >> 2); int64_t s10 = 2097151 & (load_3(s + 26) >> 2);
crypto_int64 s11 = 2097151 & (load_4(s + 28) >> 7); int64_t s11 = 2097151 & (load_4(s + 28) >> 7);
crypto_int64 s12 = 2097151 & (load_4(s + 31) >> 4); int64_t s12 = 2097151 & (load_4(s + 31) >> 4);
crypto_int64 s13 = 2097151 & (load_3(s + 34) >> 1); int64_t s13 = 2097151 & (load_3(s + 34) >> 1);
crypto_int64 s14 = 2097151 & (load_4(s + 36) >> 6); int64_t s14 = 2097151 & (load_4(s + 36) >> 6);
crypto_int64 s15 = 2097151 & (load_3(s + 39) >> 3); int64_t s15 = 2097151 & (load_3(s + 39) >> 3);
crypto_int64 s16 = 2097151 & load_3(s + 42); int64_t s16 = 2097151 & load_3(s + 42);
crypto_int64 s17 = 2097151 & (load_4(s + 44) >> 5); int64_t s17 = 2097151 & (load_4(s + 44) >> 5);
crypto_int64 s18 = 2097151 & (load_3(s + 47) >> 2); int64_t s18 = 2097151 & (load_3(s + 47) >> 2);
crypto_int64 s19 = 2097151 & (load_4(s + 49) >> 7); int64_t s19 = 2097151 & (load_4(s + 49) >> 7);
crypto_int64 s20 = 2097151 & (load_4(s + 52) >> 4); int64_t s20 = 2097151 & (load_4(s + 52) >> 4);
crypto_int64 s21 = 2097151 & (load_3(s + 55) >> 1); int64_t s21 = 2097151 & (load_3(s + 55) >> 1);
crypto_int64 s22 = 2097151 & (load_4(s + 57) >> 6); int64_t s22 = 2097151 & (load_4(s + 57) >> 6);
crypto_int64 s23 = (load_4(s + 60) >> 3); int64_t s23 = (load_4(s + 60) >> 3);
crypto_int64 carry0; int64_t carry0;
crypto_int64 carry1; int64_t carry1;
crypto_int64 carry2; int64_t carry2;
crypto_int64 carry3; int64_t carry3;
crypto_int64 carry4; int64_t carry4;
crypto_int64 carry5; int64_t carry5;
crypto_int64 carry6; int64_t carry6;
crypto_int64 carry7; int64_t carry7;
crypto_int64 carry8; int64_t carry8;
crypto_int64 carry9; int64_t carry9;
crypto_int64 carry10; int64_t carry10;
crypto_int64 carry11; int64_t carry11;
crypto_int64 carry12; int64_t carry12;
crypto_int64 carry13; int64_t carry13;
crypto_int64 carry14; int64_t carry14;
crypto_int64 carry15; int64_t carry15;
crypto_int64 carry16; int64_t carry16;
s11 += s23 * 666643; s11 += s23 * 666643;
s12 += s23 * 470296; s12 += s23 * 470296;
@ -82,7 +83,6 @@ void sc_reduce(unsigned char *s)
s14 -= s23 * 997805; s14 -= s23 * 997805;
s15 += s23 * 136657; s15 += s23 * 136657;
s16 -= s23 * 683901; s16 -= s23 * 683901;
s23 = 0;
s10 += s22 * 666643; s10 += s22 * 666643;
s11 += s22 * 470296; s11 += s22 * 470296;
@ -90,7 +90,6 @@ void sc_reduce(unsigned char *s)
s13 -= s22 * 997805; s13 -= s22 * 997805;
s14 += s22 * 136657; s14 += s22 * 136657;
s15 -= s22 * 683901; s15 -= s22 * 683901;
s22 = 0;
s9 += s21 * 666643; s9 += s21 * 666643;
s10 += s21 * 470296; s10 += s21 * 470296;
@ -98,7 +97,6 @@ void sc_reduce(unsigned char *s)
s12 -= s21 * 997805; s12 -= s21 * 997805;
s13 += s21 * 136657; s13 += s21 * 136657;
s14 -= s21 * 683901; s14 -= s21 * 683901;
s21 = 0;
s8 += s20 * 666643; s8 += s20 * 666643;
s9 += s20 * 470296; s9 += s20 * 470296;
@ -106,7 +104,6 @@ void sc_reduce(unsigned char *s)
s11 -= s20 * 997805; s11 -= s20 * 997805;
s12 += s20 * 136657; s12 += s20 * 136657;
s13 -= s20 * 683901; s13 -= s20 * 683901;
s20 = 0;
s7 += s19 * 666643; s7 += s19 * 666643;
s8 += s19 * 470296; s8 += s19 * 470296;
@ -114,7 +111,6 @@ void sc_reduce(unsigned char *s)
s10 -= s19 * 997805; s10 -= s19 * 997805;
s11 += s19 * 136657; s11 += s19 * 136657;
s12 -= s19 * 683901; s12 -= s19 * 683901;
s19 = 0;
s6 += s18 * 666643; s6 += s18 * 666643;
s7 += s18 * 470296; s7 += s18 * 470296;
@ -122,20 +118,19 @@ void sc_reduce(unsigned char *s)
s9 -= s18 * 997805; s9 -= s18 * 997805;
s10 += s18 * 136657; s10 += s18 * 136657;
s11 -= s18 * 683901; s11 -= s18 * 683901;
s18 = 0;
carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = (s6 + (int64_t) (1L << 20)) >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = (s8 + (int64_t) (1L << 20)) >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = (s10 + (int64_t) (1L << 20)) >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
carry12 = (s12 + (1<<20)) >> 21; s13 += carry12; s12 -= carry12 << 21; carry12 = (s12 + (int64_t) (1L << 20)) >> 21; s13 += carry12; s12 -= carry12 * ((uint64_t) 1L << 21);
carry14 = (s14 + (1<<20)) >> 21; s15 += carry14; s14 -= carry14 << 21; carry14 = (s14 + (int64_t) (1L << 20)) >> 21; s15 += carry14; s14 -= carry14 * ((uint64_t) 1L << 21);
carry16 = (s16 + (1<<20)) >> 21; s17 += carry16; s16 -= carry16 << 21; carry16 = (s16 + (int64_t) (1L << 20)) >> 21; s17 += carry16; s16 -= carry16 * ((uint64_t) 1L << 21);
carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = (s7 + (int64_t) (1L << 20)) >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = (s9 + (int64_t) (1L << 20)) >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; carry11 = (s11 + (int64_t) (1L << 20)) >> 21; s12 += carry11; s11 -= carry11 * ((uint64_t) 1L << 21);
carry13 = (s13 + (1<<20)) >> 21; s14 += carry13; s13 -= carry13 << 21; carry13 = (s13 + (int64_t) (1L << 20)) >> 21; s14 += carry13; s13 -= carry13 * ((uint64_t) 1L << 21);
carry15 = (s15 + (1<<20)) >> 21; s16 += carry15; s15 -= carry15 << 21; carry15 = (s15 + (int64_t) (1L << 20)) >> 21; s16 += carry15; s15 -= carry15 * ((uint64_t) 1L << 21);
s5 += s17 * 666643; s5 += s17 * 666643;
s6 += s17 * 470296; s6 += s17 * 470296;
@ -143,7 +138,6 @@ void sc_reduce(unsigned char *s)
s8 -= s17 * 997805; s8 -= s17 * 997805;
s9 += s17 * 136657; s9 += s17 * 136657;
s10 -= s17 * 683901; s10 -= s17 * 683901;
s17 = 0;
s4 += s16 * 666643; s4 += s16 * 666643;
s5 += s16 * 470296; s5 += s16 * 470296;
@ -151,7 +145,6 @@ void sc_reduce(unsigned char *s)
s7 -= s16 * 997805; s7 -= s16 * 997805;
s8 += s16 * 136657; s8 += s16 * 136657;
s9 -= s16 * 683901; s9 -= s16 * 683901;
s16 = 0;
s3 += s15 * 666643; s3 += s15 * 666643;
s4 += s15 * 470296; s4 += s15 * 470296;
@ -159,7 +152,6 @@ void sc_reduce(unsigned char *s)
s6 -= s15 * 997805; s6 -= s15 * 997805;
s7 += s15 * 136657; s7 += s15 * 136657;
s8 -= s15 * 683901; s8 -= s15 * 683901;
s15 = 0;
s2 += s14 * 666643; s2 += s14 * 666643;
s3 += s14 * 470296; s3 += s14 * 470296;
@ -167,7 +159,6 @@ void sc_reduce(unsigned char *s)
s5 -= s14 * 997805; s5 -= s14 * 997805;
s6 += s14 * 136657; s6 += s14 * 136657;
s7 -= s14 * 683901; s7 -= s14 * 683901;
s14 = 0;
s1 += s13 * 666643; s1 += s13 * 666643;
s2 += s13 * 470296; s2 += s13 * 470296;
@ -175,7 +166,6 @@ void sc_reduce(unsigned char *s)
s4 -= s13 * 997805; s4 -= s13 * 997805;
s5 += s13 * 136657; s5 += s13 * 136657;
s6 -= s13 * 683901; s6 -= s13 * 683901;
s13 = 0;
s0 += s12 * 666643; s0 += s12 * 666643;
s1 += s12 * 470296; s1 += s12 * 470296;
@ -185,19 +175,19 @@ void sc_reduce(unsigned char *s)
s5 -= s12 * 683901; s5 -= s12 * 683901;
s12 = 0; s12 = 0;
carry0 = (s0 + (1<<20)) >> 21; s1 += carry0; s0 -= carry0 << 21; carry0 = (s0 + (int64_t) (1L << 20)) >> 21; s1 += carry0; s0 -= carry0 * ((uint64_t) 1L << 21);
carry2 = (s2 + (1<<20)) >> 21; s3 += carry2; s2 -= carry2 << 21; carry2 = (s2 + (int64_t) (1L << 20)) >> 21; s3 += carry2; s2 -= carry2 * ((uint64_t) 1L << 21);
carry4 = (s4 + (1<<20)) >> 21; s5 += carry4; s4 -= carry4 << 21; carry4 = (s4 + (int64_t) (1L << 20)) >> 21; s5 += carry4; s4 -= carry4 * ((uint64_t) 1L << 21);
carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = (s6 + (int64_t) (1L << 20)) >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = (s8 + (int64_t) (1L << 20)) >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = (s10 + (int64_t) (1L << 20)) >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
carry1 = (s1 + (1<<20)) >> 21; s2 += carry1; s1 -= carry1 << 21; carry1 = (s1 + (int64_t) (1L << 20)) >> 21; s2 += carry1; s1 -= carry1 * ((uint64_t) 1L << 21);
carry3 = (s3 + (1<<20)) >> 21; s4 += carry3; s3 -= carry3 << 21; carry3 = (s3 + (int64_t) (1L << 20)) >> 21; s4 += carry3; s3 -= carry3 * ((uint64_t) 1L << 21);
carry5 = (s5 + (1<<20)) >> 21; s6 += carry5; s5 -= carry5 << 21; carry5 = (s5 + (int64_t) (1L << 20)) >> 21; s6 += carry5; s5 -= carry5 * ((uint64_t) 1L << 21);
carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = (s7 + (int64_t) (1L << 20)) >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = (s9 + (int64_t) (1L << 20)) >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; carry11 = (s11 + (int64_t) (1L << 20)) >> 21; s12 += carry11; s11 -= carry11 * ((uint64_t) 1L << 21);
s0 += s12 * 666643; s0 += s12 * 666643;
s1 += s12 * 470296; s1 += s12 * 470296;
@ -207,18 +197,18 @@ void sc_reduce(unsigned char *s)
s5 -= s12 * 683901; s5 -= s12 * 683901;
s12 = 0; s12 = 0;
carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 * ((uint64_t) 1L << 21);
carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 * ((uint64_t) 1L << 21);
carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 * ((uint64_t) 1L << 21);
carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 * ((uint64_t) 1L << 21);
carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 * ((uint64_t) 1L << 21);
carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 * ((uint64_t) 1L << 21);
carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
carry11 = s11 >> 21; s12 += carry11; s11 -= carry11 << 21; carry11 = s11 >> 21; s12 += carry11; s11 -= carry11 * ((uint64_t) 1L << 21);
s0 += s12 * 666643; s0 += s12 * 666643;
s1 += s12 * 470296; s1 += s12 * 470296;
@ -226,49 +216,48 @@ void sc_reduce(unsigned char *s)
s3 -= s12 * 997805; s3 -= s12 * 997805;
s4 += s12 * 136657; s4 += s12 * 136657;
s5 -= s12 * 683901; s5 -= s12 * 683901;
s12 = 0;
carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 * ((uint64_t) 1L << 21);
carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 * ((uint64_t) 1L << 21);
carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 * ((uint64_t) 1L << 21);
carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 * ((uint64_t) 1L << 21);
carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 * ((uint64_t) 1L << 21);
carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 * ((uint64_t) 1L << 21);
carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 * ((uint64_t) 1L << 21);
carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 * ((uint64_t) 1L << 21);
carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 * ((uint64_t) 1L << 21);
carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 * ((uint64_t) 1L << 21);
carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 * ((uint64_t) 1L << 21);
s[0] = s0 >> 0; s[0] = s0 >> 0;
s[1] = s0 >> 8; s[1] = s0 >> 8;
s[2] = (s0 >> 16) | (s1 << 5); s[2] = (s0 >> 16) | (s1 * ((uint64_t) 1 << 5));
s[3] = s1 >> 3; s[3] = s1 >> 3;
s[4] = s1 >> 11; s[4] = s1 >> 11;
s[5] = (s1 >> 19) | (s2 << 2); s[5] = (s1 >> 19) | (s2 * ((uint64_t) 1 << 2));
s[6] = s2 >> 6; s[6] = s2 >> 6;
s[7] = (s2 >> 14) | (s3 << 7); s[7] = (s2 >> 14) | (s3 * ((uint64_t) 1 << 7));
s[8] = s3 >> 1; s[8] = s3 >> 1;
s[9] = s3 >> 9; s[9] = s3 >> 9;
s[10] = (s3 >> 17) | (s4 << 4); s[10] = (s3 >> 17) | (s4 * ((uint64_t) 1 << 4));
s[11] = s4 >> 4; s[11] = s4 >> 4;
s[12] = s4 >> 12; s[12] = s4 >> 12;
s[13] = (s4 >> 20) | (s5 << 1); s[13] = (s4 >> 20) | (s5 * ((uint64_t) 1 << 1));
s[14] = s5 >> 7; s[14] = s5 >> 7;
s[15] = (s5 >> 15) | (s6 << 6); s[15] = (s5 >> 15) | (s6 * ((uint64_t) 1 << 6));
s[16] = s6 >> 2; s[16] = s6 >> 2;
s[17] = s6 >> 10; s[17] = s6 >> 10;
s[18] = (s6 >> 18) | (s7 << 3); s[18] = (s6 >> 18) | (s7 * ((uint64_t) 1 << 3));
s[19] = s7 >> 5; s[19] = s7 >> 5;
s[20] = s7 >> 13; s[20] = s7 >> 13;
s[21] = s8 >> 0; s[21] = s8 >> 0;
s[22] = s8 >> 8; s[22] = s8 >> 8;
s[23] = (s8 >> 16) | (s9 << 5); s[23] = (s8 >> 16) | (s9 * ((uint64_t) 1 << 5));
s[24] = s9 >> 3; s[24] = s9 >> 3;
s[25] = s9 >> 11; s[25] = s9 >> 11;
s[26] = (s9 >> 19) | (s10 << 2); s[26] = (s9 >> 19) | (s10 * ((uint64_t) 1 << 2));
s[27] = s10 >> 6; s[27] = s10 >> 6;
s[28] = (s10 >> 14) | (s11 << 7); s[28] = (s10 >> 14) | (s11 * ((uint64_t) 1 << 7));
s[29] = s11 >> 1; s[29] = s11 >> 1;
s[30] = s11 >> 9; s[30] = s11 >> 9;
s[31] = s11 >> 17; s[31] = s11 >> 17;
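sc_reduce above unpacks the 64-byte scalar with the load_3 and load_4 helpers, which are defined elsewhere in the same file. As a hedged sketch of what they do, assuming the usual ref10 little-endian convention (the real definitions may differ in detail):

#include <stdint.h>

/* Little-endian loads of 3 and 4 bytes into a 64-bit word. */
static uint64_t load_3(const unsigned char *in)
{
    uint64_t result = (uint64_t) in[0];
    result |= ((uint64_t) in[1]) << 8;
    result |= ((uint64_t) in[2]) << 16;
    return result;
}

static uint64_t load_4(const unsigned char *in)
{
    uint64_t result = (uint64_t) in[0];
    result |= ((uint64_t) in[1]) << 8;
    result |= ((uint64_t) in[2]) << 16;
    result |= ((uint64_t) in[3]) << 24;
    return result;
}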


@ -732,9 +732,7 @@ struct rhizome_write_buffer
struct rhizome_write struct rhizome_write
{ {
rhizome_filehash_t id;
uint64_t temp_id; uint64_t temp_id;
char id_known;
uint64_t tail; uint64_t tail;
uint64_t file_offset; uint64_t file_offset;
uint64_t written_offset; uint64_t written_offset;
@ -742,14 +740,17 @@ struct rhizome_write
struct rhizome_write_buffer *buffer_list; struct rhizome_write_buffer *buffer_list;
size_t buffer_size; size_t buffer_size;
int crypt; struct crypto_hash_sha512_state sha512_context;
unsigned char key[RHIZOME_CRYPT_KEY_BYTES];
unsigned char nonce[crypto_stream_xsalsa20_NONCEBYTES];
SHA512_CTX sha512_context;
uint64_t blob_rowid; uint64_t blob_rowid;
int blob_fd; int blob_fd;
sqlite3_blob *sql_blob; sqlite3_blob *sql_blob;
rhizome_filehash_t id;
uint8_t id_known;
uint8_t crypt;
unsigned char key[RHIZOME_CRYPT_KEY_BYTES];
unsigned char nonce[crypto_stream_xsalsa20_NONCEBYTES];
}; };
struct rhizome_read_buffer{ struct rhizome_read_buffer{
@ -760,15 +761,8 @@ struct rhizome_read_buffer{
struct rhizome_read struct rhizome_read
{ {
rhizome_filehash_t id;
int crypt;
unsigned char key[RHIZOME_CRYPT_KEY_BYTES];
unsigned char nonce[crypto_stream_xsalsa20_NONCEBYTES];
uint64_t hash_offset; uint64_t hash_offset;
SHA512_CTX sha512_context; struct crypto_hash_sha512_state sha512_context;
char verified;
uint64_t blob_rowid; uint64_t blob_rowid;
int blob_fd; int blob_fd;
@ -776,6 +770,12 @@ struct rhizome_read
uint64_t tail; uint64_t tail;
uint64_t offset; uint64_t offset;
uint64_t length; uint64_t length;
int8_t verified;
uint8_t crypt;
rhizome_filehash_t id;
unsigned char key[RHIZOME_CRYPT_KEY_BYTES];
unsigned char nonce[crypto_stream_xsalsa20_NONCEBYTES];
}; };
int rhizome_received_content(const unsigned char *bidprefix,uint64_t version, int rhizome_received_content(const unsigned char *bidprefix,uint64_t version,


@ -1186,8 +1186,8 @@ int rhizome_hash_file(rhizome_manifest *m, const char *path, rhizome_filehash_t
if (m && m->payloadEncryption == PAYLOAD_ENCRYPTED) if (m && m->payloadEncryption == PAYLOAD_ENCRYPTED)
return WHY("Encryption of payloads not implemented"); return WHY("Encryption of payloads not implemented");
uint64_t filesize = 0; uint64_t filesize = 0;
SHA512_CTX context; crypto_hash_sha512_state context;
SHA512_Init(&context); crypto_hash_sha512_init(&context);
if (path[0]) { if (path[0]) {
int fd = open(path, O_RDONLY); int fd = open(path, O_RDONLY);
if (fd == -1) if (fd == -1)
@ -1200,7 +1200,7 @@ int rhizome_hash_file(rhizome_manifest *m, const char *path, rhizome_filehash_t
close(fd); close(fd);
return -1; return -1;
} }
SHA512_Update(&context, buffer, (size_t) r); crypto_hash_sha512_update(&context, buffer, (size_t) r);
filesize += (size_t) r; filesize += (size_t) r;
} }
close(fd); close(fd);
@ -1208,13 +1208,12 @@ int rhizome_hash_file(rhizome_manifest *m, const char *path, rhizome_filehash_t
// Empty files (including empty path) have no hash. // Empty files (including empty path) have no hash.
if (hash_out) { if (hash_out) {
if (filesize > 0) if (filesize > 0)
SHA512_Final(hash_out->binary, &context); crypto_hash_sha512_final(&context, hash_out->binary);
else else
*hash_out = RHIZOME_FILEHASH_NONE; *hash_out = RHIZOME_FILEHASH_NONE;
} }
if (size_out) if (size_out)
*size_out = filesize; *size_out = filesize;
SHA512_End(&context, NULL);
return 0; return 0;
} }
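rhizome_hash_file now drives libsodium's incremental SHA-512 API instead of the bundled SHA512_* functions, and no longer needs the SHA512_End cleanup call. A minimal, self-contained sketch of that init/update/final API (assumes libsodium is installed and the program is linked with -lsodium):

#include <sodium.h>
#include <stdio.h>

int main(void)
{
    if (sodium_init() < 0)
        return 1;

    /* Incremental SHA-512: the same sequence the rhizome code now uses. */
    crypto_hash_sha512_state state;
    unsigned char hash[crypto_hash_sha512_BYTES];
    const unsigned char part1[] = "hello ";
    const unsigned char part2[] = "world";

    crypto_hash_sha512_init(&state);
    crypto_hash_sha512_update(&state, part1, sizeof part1 - 1);
    crypto_hash_sha512_update(&state, part2, sizeof part2 - 1);
    crypto_hash_sha512_final(&state, hash);

    for (size_t i = 0; i < sizeof hash; i++)
        printf("%02x", hash[i]);
    printf("\n");
    return 0;
}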


@ -333,7 +333,7 @@ enum rhizome_payload_status rhizome_open_write(struct rhizome_write *write, cons
write->file_length = file_length; write->file_length = file_length;
write->file_offset = 0; write->file_offset = 0;
write->written_offset = 0; write->written_offset = 0;
SHA512_Init(&write->sha512_context); crypto_hash_sha512_init(&write->sha512_context);
return RHIZOME_PAYLOAD_STATUS_NEW; return RHIZOME_PAYLOAD_STATUS_NEW;
} }
@ -362,7 +362,7 @@ static int prepare_data(struct rhizome_write *write_state, uint8_t *buffer, size
return -1; return -1;
} }
SHA512_Update(&write_state->sha512_context, buffer, data_size); crypto_hash_sha512_update(&write_state->sha512_context, buffer, data_size);
write_state->file_offset+=data_size; write_state->file_offset+=data_size;
DEBUGF(rhizome_store, "Processed %"PRIu64" of %"PRIu64, write_state->file_offset, write_state->file_length); DEBUGF(rhizome_store, "Processed %"PRIu64" of %"PRIu64, write_state->file_offset, write_state->file_length);
@ -711,8 +711,7 @@ enum rhizome_payload_status rhizome_finish_write(struct rhizome_write *write)
} }
rhizome_filehash_t hash_out; rhizome_filehash_t hash_out;
SHA512_Final(hash_out.binary, &write->sha512_context); crypto_hash_sha512_final(&write->sha512_context, hash_out.binary);
SHA512_End(&write->sha512_context, NULL);
if (write->id_known) { if (write->id_known) {
if (cmp_rhizome_filehash_t(&write->id, &hash_out) != 0) { if (cmp_rhizome_filehash_t(&write->id, &hash_out) != 0) {
@ -1029,7 +1028,7 @@ enum rhizome_payload_status rhizome_open_read(struct rhizome_read *read, const r
} }
DEBUGF(rhizome_store, "Opened stored file %s as fd %d, len %"PRIx64, blob_path, read->blob_fd, read->length); DEBUGF(rhizome_store, "Opened stored file %s as fd %d, len %"PRIx64, blob_path, read->blob_fd, read->length);
} }
SHA512_Init(&read->sha512_context); crypto_hash_sha512_init(&read->sha512_context);
return RHIZOME_PAYLOAD_STATUS_STORED; return RHIZOME_PAYLOAD_STATUS_STORED;
} }
@ -1095,14 +1094,13 @@ ssize_t rhizome_read(struct rhizome_read *read_state, unsigned char *buffer, siz
// hash the payload as we go, but only if we happen to read the payload data in order // hash the payload as we go, but only if we happen to read the payload data in order
if (read_state->hash_offset == read_state->offset && buffer && bytes_read>0){ if (read_state->hash_offset == read_state->offset && buffer && bytes_read>0){
SHA512_Update(&read_state->sha512_context, buffer, bytes_read); crypto_hash_sha512_update(&read_state->sha512_context, buffer, bytes_read);
read_state->hash_offset += bytes_read; read_state->hash_offset += bytes_read;
// if we hash everything and the hash doesn't match, we need to delete the payload // if we hash everything and the hash doesn't match, we need to delete the payload
if (read_state->hash_offset >= read_state->length){ if (read_state->hash_offset >= read_state->length){
rhizome_filehash_t hash_out; rhizome_filehash_t hash_out;
SHA512_Final(hash_out.binary, &read_state->sha512_context); crypto_hash_sha512_final(&read_state->sha512_context, hash_out.binary);
SHA512_End(&read_state->sha512_context, NULL);
if (cmp_rhizome_filehash_t(&read_state->id, &hash_out) != 0) { if (cmp_rhizome_filehash_t(&read_state->id, &hash_out) != 0) {
// hash failure, mark the payload as invalid // hash failure, mark the payload as invalid
read_state->verified = -1; read_state->verified = -1;
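The read path keeps the same shape: hash each chunk as it streams past, then compare the finished digest with the expected file hash. A standalone sketch of that verify-as-you-read pattern against libsodium (hypothetical data, linked with -lsodium):

#include <sodium.h>
#include <string.h>
#include <stdio.h>

int main(void)
{
    if (sodium_init() < 0)
        return 1;

    /* Pretend this digest is the expected file hash from the manifest. */
    unsigned char expected[crypto_hash_sha512_BYTES];
    crypto_hash_sha512(expected, (const unsigned char *) "hello world!", 12);

    /* "Reads" arrive chunk by chunk; hash them as they go past. */
    const char *chunks[] = { "hello ", "world!" };
    crypto_hash_sha512_state st;
    unsigned char actual[crypto_hash_sha512_BYTES];

    crypto_hash_sha512_init(&st);
    for (size_t i = 0; i < 2; i++)
        crypto_hash_sha512_update(&st, (const unsigned char *) chunks[i], strlen(chunks[i]));
    crypto_hash_sha512_final(&st, actual);

    puts(memcmp(expected, actual, sizeof actual) == 0 ? "payload verified" : "payload corrupt");
    return 0;
}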


@ -39,7 +39,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#define RHIZOME_BUNDLE_ID_STRLEN (RHIZOME_BUNDLE_ID_BYTES * 2) #define RHIZOME_BUNDLE_ID_STRLEN (RHIZOME_BUNDLE_ID_BYTES * 2)
#define RHIZOME_BUNDLE_KEY_BYTES (crypto_sign_edwards25519sha512batch_SECRETKEYBYTES - crypto_sign_edwards25519sha512batch_PUBLICKEYBYTES) #define RHIZOME_BUNDLE_KEY_BYTES (crypto_sign_edwards25519sha512batch_SECRETKEYBYTES - crypto_sign_edwards25519sha512batch_PUBLICKEYBYTES)
#define RHIZOME_BUNDLE_KEY_STRLEN (RHIZOME_BUNDLE_KEY_BYTES * 2) #define RHIZOME_BUNDLE_KEY_STRLEN (RHIZOME_BUNDLE_KEY_BYTES * 2)
#define RHIZOME_FILEHASH_BYTES SHA512_DIGEST_LENGTH #define RHIZOME_FILEHASH_BYTES crypto_hash_sha512_BYTES
#define RHIZOME_FILEHASH_STRLEN (RHIZOME_FILEHASH_BYTES * 2) #define RHIZOME_FILEHASH_STRLEN (RHIZOME_FILEHASH_BYTES * 2)
#define RHIZOME_CRYPT_KEY_BYTES crypto_stream_xsalsa20_ref_KEYBYTES #define RHIZOME_CRYPT_KEY_BYTES crypto_stream_xsalsa20_ref_KEYBYTES
#define RHIZOME_CRYPT_KEY_STRLEN (RHIZOME_CRYPT_KEY_BYTES * 2) #define RHIZOME_CRYPT_KEY_STRLEN (RHIZOME_CRYPT_KEY_BYTES * 2)


@ -22,6 +22,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#include <sys/types.h> #include <sys/types.h>
#include <stdint.h> #include <stdint.h>
#include <inttypes.h>
/* Conveniences to assist readability /* Conveniences to assist readability
*/ */

1325
sha2.c

File diff suppressed because it is too large

209
sha2.h

@ -1,203 +1,34 @@
/* #ifndef __SERVALD_SHA512_H
* FILE: sha2.h #define __SERVALD_SHA512_H
* AUTHOR: Aaron D. Gifford - http://www.aarongifford.com/
*
* Copyright (c) 2000-2001, Aaron D. Gifford
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTOR(S) ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTOR(S) BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $Id: sha2.h,v 1.1 2001/11/08 00:02:01 adg Exp adg $
*/
#ifndef __SHA2_H__
#define __SHA2_H__
#ifdef __cplusplus
extern "C" {
#endif
/* typedef struct crypto_hash_sha512_state {
* Import u_intXX_t size_t type definitions from system headers. You
* may need to change this, or define these things yourself in this
* file.
*/
#include <sys/types.h>
#ifdef SHA2_USE_INTTYPES_H
#include <inttypes.h>
#endif /* SHA2_USE_INTTYPES_H */
/*** SHA-256/384/512 Various Length Definitions ***********************/
#define SHA256_BLOCK_LENGTH 64
#define SHA256_DIGEST_LENGTH 32
#define SHA256_DIGEST_STRING_LENGTH (SHA256_DIGEST_LENGTH * 2 + 1)
#define SHA384_BLOCK_LENGTH 128
#define SHA384_DIGEST_LENGTH 48
#define SHA384_DIGEST_STRING_LENGTH (SHA384_DIGEST_LENGTH * 2 + 1)
#define SHA512_BLOCK_LENGTH 128
#define SHA512_DIGEST_LENGTH 64
#define SHA512_DIGEST_STRING_LENGTH (SHA512_DIGEST_LENGTH * 2 + 1)
/*** SHA-256/384/512 Context Structures *******************************/
/* NOTE: If your architecture does not define either u_intXX_t types or
* uintXX_t (from inttypes.h), you may need to define things by hand
* for your system:
*/
#if 0
typedef unsigned char u_int8_t; /* 1-byte (8-bits) */
typedef unsigned int u_int32_t; /* 4-bytes (32-bits) */
typedef unsigned long long u_int64_t; /* 8-bytes (64-bits) */
#endif
/*
* Most BSD systems already define u_intXX_t types, as does Linux.
* Some systems, however, like Compaq's Tru64 Unix instead can use
* uintXX_t types defined by very recent ANSI C standards and included
* in the file:
*
* #include <inttypes.h>
*
* If you choose to use <inttypes.h> then please define:
*
* #define SHA2_USE_INTTYPES_H
*
* Or on the command line during compile:
*
* cc -DSHA2_USE_INTTYPES_H ...
*/
#ifdef SHA2_USE_INTTYPES_H
typedef struct _SHA256_CTX {
uint32_t state[8];
uint64_t bitcount;
uint8_t buffer[SHA256_BLOCK_LENGTH];
} SHA256_CTX;
typedef struct _SHA512_CTX {
uint64_t state[8]; uint64_t state[8];
uint64_t bitcount[2]; uint64_t count[2];
uint8_t buffer[SHA512_BLOCK_LENGTH]; unsigned char buf[128];
} SHA512_CTX; } crypto_hash_sha512_state;
size_t crypto_hash_sha512_statebytes(void);
#else /* SHA2_USE_INTTYPES_H */ #ifndef crypto_hash_sha512_BYTES
#define crypto_hash_sha512_BYTES 64U
#endif
typedef struct _SHA256_CTX { size_t crypto_hash_sha512_bytes(void);
u_int32_t state[8];
u_int64_t bitcount;
u_int8_t buffer[SHA256_BLOCK_LENGTH];
} SHA256_CTX;
typedef struct _SHA512_CTX {
u_int64_t state[8];
u_int64_t bitcount[2];
u_int8_t buffer[SHA512_BLOCK_LENGTH];
} SHA512_CTX;
#endif /* SHA2_USE_INTTYPES_H */
typedef SHA512_CTX SHA384_CTX;
/*** SHA-256/384/512 Function Prototypes ******************************/ int crypto_hash_sha512(unsigned char *out, const unsigned char *in,
#ifndef NOPROTO unsigned long long inlen);
#ifdef SHA2_USE_INTTYPES_H
void SHA256_Init(SHA256_CTX *);
void SHA256_Update(SHA256_CTX*, const uint8_t*, size_t);
void SHA256_Final(uint8_t[SHA256_DIGEST_LENGTH], SHA256_CTX*);
void SHA256_Final_Len(uint8_t[], size_t, SHA256_CTX*);
char* SHA256_End(SHA256_CTX*, char[SHA256_DIGEST_STRING_LENGTH]);
char* SHA256_Data(const uint8_t*, size_t, char[SHA256_DIGEST_STRING_LENGTH]);
void SHA384_Init(SHA384_CTX*); int crypto_hash_sha512_init(crypto_hash_sha512_state *state);
void SHA384_Update(SHA384_CTX*, const uint8_t*, size_t);
void SHA384_Final(uint8_t[SHA384_DIGEST_LENGTH], SHA384_CTX*);
void SHA384_Final_Len(uint8_t[], size_t, SHA384_CTX*);
char* SHA384_End(SHA384_CTX*, char[SHA384_DIGEST_STRING_LENGTH]);
char* SHA384_Data(const uint8_t*, size_t, char[SHA384_DIGEST_STRING_LENGTH]);
void SHA512_Init(SHA512_CTX*);
void SHA512_Update(SHA512_CTX*, const uint8_t*, size_t);
void SHA512_Final(uint8_t[SHA512_DIGEST_LENGTH], SHA512_CTX*);
void SHA512_Final_Len(uint8_t[], size_t, SHA512_CTX*);
char* SHA512_End(SHA512_CTX*, char[SHA512_DIGEST_STRING_LENGTH]);
char* SHA512_Data(const uint8_t*, size_t, char[SHA512_DIGEST_STRING_LENGTH]);
#else /* SHA2_USE_INTTYPES_H */ int crypto_hash_sha512_update(crypto_hash_sha512_state *state,
const unsigned char *in,
unsigned long long inlen);
void SHA256_Init(SHA256_CTX *);
void SHA256_Update(SHA256_CTX*, const u_int8_t*, size_t);
void SHA256_Final(u_int8_t[SHA256_DIGEST_LENGTH], SHA256_CTX*);
void SHA256_Final_Len(u_int8_t[], size_t, SHA256_CTX*);
char* SHA256_End(SHA256_CTX*, char[SHA256_DIGEST_STRING_LENGTH]);
char* SHA256_Data(const u_int8_t*, size_t, char[SHA256_DIGEST_STRING_LENGTH]);
void SHA384_Init(SHA384_CTX*); int crypto_hash_sha512_final(crypto_hash_sha512_state *state,
void SHA384_Update(SHA384_CTX*, const u_int8_t*, size_t); unsigned char *out);
void SHA384_Final(u_int8_t[SHA384_DIGEST_LENGTH], SHA384_CTX*);
void SHA384_Final_Len(u_int8_t[], size_t, SHA384_CTX*);
char* SHA384_End(SHA384_CTX*, char[SHA384_DIGEST_STRING_LENGTH]);
char* SHA384_Data(const u_int8_t*, size_t, char[SHA384_DIGEST_STRING_LENGTH]);
void SHA512_Init(SHA512_CTX*);
void SHA512_Update(SHA512_CTX*, const u_int8_t*, size_t);
void SHA512_Final(u_int8_t[SHA512_DIGEST_LENGTH], SHA512_CTX*);
void SHA512_Final_Len(u_int8_t[], size_t, SHA512_CTX*);
char* SHA512_End(SHA512_CTX*, char[SHA512_DIGEST_STRING_LENGTH]);
char* SHA512_Data(const u_int8_t*, size_t, char[SHA512_DIGEST_STRING_LENGTH]);
#endif /* SHA2_USE_INTTYPES_H */
#else /* NOPROTO */
void SHA256_Init();
void SHA256_Update();
void SHA256_Final();
char* SHA256_End();
char* SHA256_Data();
void SHA384_Init();
void SHA384_Update();
void SHA384_Final();
char* SHA384_End();
char* SHA384_Data();
void SHA512_Init();
void SHA512_Update();
void SHA512_Final();
char* SHA512_End();
char* SHA512_Data();
#endif /* NOPROTO */
#ifdef __cplusplus
}
#endif /* __cplusplus */
#endif /* __SHA2_H__ */
#endif
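The replacement sha2.h reduces the header to the libsodium-compatible surface: a state struct, the digest length, and the one-shot plus init/update/final entry points. A hedged sketch of how those entry points and the statebytes helper might be cross-checked (assumes it is compiled together with the project's sha2.c, or against libsodium itself):

#include <assert.h>
#include <stdio.h>
#include <string.h>
#include "sha2.h"   /* the replacement header shown above */

int main(void)
{
    const unsigned char msg[] = "abc";
    unsigned char one_shot[crypto_hash_sha512_BYTES];
    unsigned char incremental[crypto_hash_sha512_BYTES];
    crypto_hash_sha512_state st;

    /* The helper functions should agree with the compile-time sizes. */
    assert(crypto_hash_sha512_statebytes() == sizeof st);
    assert(crypto_hash_sha512_bytes() == sizeof one_shot);

    /* One-shot and incremental hashing should produce the same digest. */
    crypto_hash_sha512(one_shot, msg, sizeof msg - 1);
    crypto_hash_sha512_init(&st);
    crypto_hash_sha512_update(&st, msg, sizeof msg - 1);
    crypto_hash_sha512_final(&st, incremental);

    printf("digests %s\n",
           memcmp(one_shot, incremental, sizeof one_shot) == 0 ? "match" : "differ");
    return 0;
}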

14
str.c

@ -1133,14 +1133,16 @@ void str_digest_passphrase(unsigned char *dstBinary, size_t dstsiz, const char *
void strn_digest_passphrase(unsigned char *dstBinary, size_t dstsiz, const char *passphrase, size_t passlen) void strn_digest_passphrase(unsigned char *dstBinary, size_t dstsiz, const char *passphrase, size_t passlen)
{ {
assert(dstsiz <= SERVAL_PASSPHRASE_DIGEST_MAX_BINARY); assert(dstsiz <= SERVAL_PASSPHRASE_DIGEST_MAX_BINARY);
SHA512_CTX context; crypto_hash_sha512_state context;
static const char salt1[] = "Sago pudding"; static const char salt1[] = "Sago pudding";
static const char salt2[] = "Rhubarb pie"; static const char salt2[] = "Rhubarb pie";
SHA512_Init(&context); crypto_hash_sha512_init(&context);
SHA512_Update(&context, (unsigned char *)salt1, sizeof salt1 - 1); crypto_hash_sha512_update(&context, (unsigned char *)salt1, sizeof salt1 - 1);
SHA512_Update(&context, (unsigned char *)passphrase, passlen); crypto_hash_sha512_update(&context, (unsigned char *)passphrase, passlen);
SHA512_Update(&context, (unsigned char *)salt2, sizeof salt2 - 1); crypto_hash_sha512_update(&context, (unsigned char *)salt2, sizeof salt2 - 1);
SHA512_Final_Len(dstBinary, dstsiz, &context); unsigned char hash[crypto_hash_sha512_BYTES];
crypto_hash_sha512_final(&context, hash);
bcopy(hash, dstBinary, dstsiz);
} }
/* Return true if the string resembles a URI. /* Return true if the string resembles a URI.