/*
 * Copyright (C) 2006-2015 wolfSSL Inc.
 *
 * This file is part of wolfSSL. (formerly known as CyaSSL)
 *
 * wolfSSL is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * wolfSSL is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 */
#ifdef TFM_SMALL_MONT_SET
/* computes x/R == x (mod N) via Montgomery Reduction */
void fp_montgomery_reduce_small(fp_int *a, fp_int *m, fp_digit mp)
{
   fp_digit c[FP_SIZE], *_c, *tmpm, mu, cy;
   int      oldused, x, y, pa;

#if defined(USE_MEMSET)
   /* now zero the buff */
   memset(c, 0, sizeof c);
#endif
   pa = m->used;

   /* copy the input */
   oldused = a->used;
   for (x = 0; x < oldused; x++) {
      c[x] = a->dp[x];
   }
#if !defined(USE_MEMSET)
   for (; x < 2*pa+3; x++) {
      c[x] = 0;
   }
#endif
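
/* For reference, the portable ISO C forms of the helper macros used
 * below (adapted from the generic path in tfm.c; exact definitions
 * vary by target, and INNERMUL8 is a target-specific fusion of eight
 * INNERMUL steps) look roughly like this:
 *
 *    #define LOOP_START   mu = c[x] * mp
 *
 *    #define INNERMUL                                       \
 *       do { fp_word t;                                     \
 *            t = ((fp_word)_c[0] + (fp_word)cy) +           \
 *                (((fp_word)mu) * ((fp_word)*tmpm++));      \
 *            _c[0] = (fp_digit)t;                           \
 *            cy    = (fp_digit)(t >> DIGIT_BIT);            \
 *       } while (0)
 *
 *    #define PROPCARRY \
 *       do { fp_digit t = _c[0] += cy; cy = (t < cy); } while (0)
 */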

   /* the multiply-and-reduce core: the original file switches on pa
    * (the modulus size in digits) and fully unrolls each case with
    * repeated INNERMUL8 invocations, pa/8 per pass; one representative
    * case is reproduced here, the remaining cases follow the same
    * pattern with proportionally more INNERMUL8 repetitions */
   switch (pa) {
      /* ... smaller cases elided ... */
      case 16:
         for (x = 0; x < 16; x++) {
            cy = 0;
            /* get Mu for this round */
            LOOP_START;
            _c   = c + x;
            tmpm = m->dp;
            INNERMUL8; _c += 8; tmpm += 8;
            INNERMUL8; _c += 8; tmpm += 8;
            LOOP_END;
            while (cy) {
               PROPCARRY;
               ++_c;
            }
         }
         break;
      /* ... larger cases elided ... */
   }

   /* now copy out */
   _c   = c + pa;
   tmpm = a->dp;
   for (x = 0; x < pa+1; x++) {
      *tmpm++ = *_c++;
   }

   for (; x < oldused; x++) {
      *tmpm++ = 0;
   }

   MONT_FINI;

   a->used = pa+1;
   fp_clamp(a);

   /* if A >= m then A = A - m */
   if (fp_cmp_mag (a, m) != FP_LT) {
      s_fp_sub (a, m, a);
   }
}

#endif /* TFM_SMALL_MONT_SET */
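
/* Illustrative sketch only (not part of the original source, kept
 * inert under #if 0): a minimal word-by-word Montgomery reduction on
 * uint32_t digits showing what the unrolled macros above compute.
 * The names toy_mont_reduce and TOY_DIGITS are hypothetical; mp is
 * -1/m[0] mod 2^32, which wolfSSL obtains via fp_montgomery_setup(). */
#if 0
#include <stdint.h>

#define TOY_DIGITS 4   /* plays the role of pa above */

/* reduces the (2*TOY_DIGITS)-digit value in c in place; afterwards
 * c[TOY_DIGITS..2*TOY_DIGITS] holds x/R mod m (possibly plus one
 * extra m), where R = 2^(32*TOY_DIGITS), mirroring the copy-out and
 * final conditional subtract performed by the function above; the
 * extra top digit absorbs the final carry for inputs below m*R */
static void toy_mont_reduce(uint32_t c[2*TOY_DIGITS + 1],
                            const uint32_t m[TOY_DIGITS], uint32_t mp)
{
    int x, y;
    for (x = 0; x < TOY_DIGITS; x++) {
        uint32_t mu = c[x] * mp;            /* LOOP_START: mu for this round */
        uint64_t cy = 0;
        for (y = 0; y < TOY_DIGITS; y++) {  /* one INNERMUL per digit */
            uint64_t t = (uint64_t)c[x + y] + cy + (uint64_t)mu * m[y];
            c[x + y] = (uint32_t)t;
            cy = t >> 32;
        }
        for (y = x + TOY_DIGITS; cy != 0; y++) {  /* PROPCARRY */
            uint64_t t = (uint64_t)c[y] + cy;
            c[y] = (uint32_t)t;
            cy = t >> 32;
        }
    }
}
#endif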