Diffstat (limited to 'cipher/sha256-ssse3-amd64.S')
 cipher/sha256-ssse3-amd64.S | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/cipher/sha256-ssse3-amd64.S b/cipher/sha256-ssse3-amd64.S
index 9b27f8f7..80b1cec4 100644
--- a/cipher/sha256-ssse3-amd64.S
+++ b/cipher/sha256-ssse3-amd64.S
@@ -206,7 +206,7 @@ a = TMP_
add h, y1 /* h = h + S1 + CH + k + w + S0 */
por XTMP1, XTMP2 /* XTMP1 = W[-15] ror 7 */
or y0, y2 /* y0 = MAJ = (a|c)&b)|(a&c) */
- add h, y0 /* h = h + S1 + CH + k + w + S0 + MAJ */
+ lea h, [h + y0] /* h = h + S1 + CH + k + w + S0 + MAJ */
ROTATE_ARGS
movdqa XTMP2, XTMP3 /* XTMP2 = W[-15] */
@@ -247,7 +247,7 @@ ROTATE_ARGS
add h, y1 /* h = h + S1 + CH + k + w + S0 */
paddd XTMP0, XTMP1 /* XTMP0 = W[-16] + W[-7] + s0 */
or y0, y2 /* y0 = MAJ = (a|c)&b)|(a&c) */
- add h, y0 /* h = h + S1 + CH + k + w + S0 + MAJ */
+ lea h, [h + y0] /* h = h + S1 + CH + k + w + S0 + MAJ */
ROTATE_ARGS
movdqa XTMP3, XTMP2 /* XTMP3 = W[-2] {BBAA} */
@@ -288,7 +288,7 @@ ROTATE_ARGS
/* compute high s1 */
pshufd XTMP2, XTMP0, 0b01010000 /* XTMP2 = W[-2] {DDCC} */
or y0, y2 /* y0 = MAJ = (a|c)&b)|(a&c) */
- add h, y0 /* h = h + S1 + CH + k + w + S0 + MAJ */
+ lea h, [h + y0] /* h = h + S1 + CH + k + w + S0 + MAJ */
ROTATE_ARGS
movdqa XTMP3, XTMP2 /* XTMP3 = W[-2] {DDCC} */
@@ -327,7 +327,7 @@ ROTATE_ARGS
and y0, b /* y0 = (a|c)&b */
add h, y1 /* h = h + S1 + CH + k + w + S0 */
or y0, y2 /* y0 = MAJ = (a|c)&b)|(a&c) */
- add h, y0 /* h = h + S1 + CH + k + w + S0 + MAJ */
+ lea h, [h + y0] /* h = h + S1 + CH + k + w + S0 + MAJ */
ROTATE_ARGS
rotate_Xs
@@ -362,7 +362,7 @@ rotate_Xs
and y0, b /* y0 = (a|c)&b */
add h, y1 /* h = h + S1 + CH + k + w + S0 */
or y0, y2 /* y0 = MAJ = (a|c)&b)|(a&c) */
- add h, y0 /* h = h + S1 + CH + k + w + S0 + MAJ */
+ lea h, [h + y0] /* h = h + S1 + CH + k + w + S0 + MAJ */
ROTATE_ARGS
.endm
@@ -505,6 +505,7 @@ _gcry_sha256_transform_amd64_ssse3:
pop rbx
mov eax, STACK_SIZE + 5*8
+
ret
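
Note on the change (not part of the patch): lea h, [h + y0] computes the same
32-bit sum as add h, y0, but unlike add it does not write EFLAGS, so it can
presumably be scheduled next to the flag-producing or/and instructions of the
MAJ computation without adding a flag dependency. The standalone sketch below
uses illustrative routine and register names that are not taken from the patch;
it only demonstrates that the two forms produce the same value and differ in
flag behaviour.

/* Sketch only: both routines return a + b for 32-bit a (edi) and b (esi). */
	.intel_syntax noprefix
	.text

	.globl	sum_with_add
sum_with_add:
	mov	eax, edi
	add	eax, esi          /* eax = a + b; updates EFLAGS */
	ret

	.globl	sum_with_lea
sum_with_lea:
	lea	eax, [rdi + rsi]  /* eax = a + b; EFLAGS left untouched */
	ret

A simple base+index lea like this is generally single-cycle on modern x86-64
cores, so the substitution costs nothing in latency while removing the flag
write.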