path: root/cipher/cast5.c
Diffstat (limited to 'cipher/cast5.c')
-rw-r--r--  cipher/cast5.c  67
1 file changed, 62 insertions(+), 5 deletions(-)
diff --git a/cipher/cast5.c b/cipher/cast5.c
index 115e1e62..94dcee76 100644
--- a/cipher/cast5.c
+++ b/cipher/cast5.c
@@ -48,7 +48,8 @@
/* USE_AMD64_ASM indicates whether to use AMD64 assembly code. */
#undef USE_AMD64_ASM
-#if defined(__x86_64__) && defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS)
+#if defined(__x86_64__) && (defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \
+ defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS))
# define USE_AMD64_ASM 1
#endif
@@ -372,16 +373,72 @@ extern void _gcry_cast5_amd64_cbc_dec(CAST5_context *ctx, byte *out,
extern void _gcry_cast5_amd64_cfb_dec(CAST5_context *ctx, byte *out,
const byte *in, byte *iv);
+#ifdef HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS
+static inline void
+call_sysv_fn (const void *fn, const void *arg1, const void *arg2,
+ const void *arg3, const void *arg4)
+{
+ /* Call SystemV ABI function without storing non-volatile XMM registers,
+ * as target function does not use vector instruction sets. */
+ asm volatile ("callq *%0\n\t"
+ : "+a" (fn),
+ "+D" (arg1),
+ "+S" (arg2),
+ "+d" (arg3),
+ "+c" (arg4)
+ :
+ : "cc", "memory", "r8", "r9", "r10", "r11");
+}
+#endif
+
static void
do_encrypt_block (CAST5_context *context, byte *outbuf, const byte *inbuf)
{
+#ifdef HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS
+ call_sysv_fn (_gcry_cast5_amd64_encrypt_block, context, outbuf, inbuf, NULL);
+#else
_gcry_cast5_amd64_encrypt_block (context, outbuf, inbuf);
+#endif
}
static void
do_decrypt_block (CAST5_context *context, byte *outbuf, const byte *inbuf)
{
+#ifdef HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS
+ call_sysv_fn (_gcry_cast5_amd64_decrypt_block, context, outbuf, inbuf, NULL);
+#else
_gcry_cast5_amd64_decrypt_block (context, outbuf, inbuf);
+#endif
+}
+
+static void
+cast5_amd64_ctr_enc(CAST5_context *ctx, byte *out, const byte *in, byte *ctr)
+{
+#ifdef HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS
+ call_sysv_fn (_gcry_cast5_amd64_ctr_enc, ctx, out, in, ctr);
+#else
+ _gcry_cast5_amd64_ctr_enc (ctx, out, in, ctr);
+#endif
+}
+
+static void
+cast5_amd64_cbc_dec(CAST5_context *ctx, byte *out, const byte *in, byte *iv)
+{
+#ifdef HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS
+ call_sysv_fn (_gcry_cast5_amd64_cbc_dec, ctx, out, in, iv);
+#else
+ _gcry_cast5_amd64_cbc_dec (ctx, out, in, iv);
+#endif
+}
+
+static void
+cast5_amd64_cfb_dec(CAST5_context *ctx, byte *out, const byte *in, byte *iv)
+{
+#ifdef HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS
+ call_sysv_fn (_gcry_cast5_amd64_cfb_dec, ctx, out, in, iv);
+#else
+ _gcry_cast5_amd64_cfb_dec (ctx, out, in, iv);
+#endif
}
static unsigned int
@@ -396,7 +453,7 @@ static unsigned int
decrypt_block (void *context, byte *outbuf, const byte *inbuf)
{
CAST5_context *c = (CAST5_context *) context;
- _gcry_cast5_amd64_decrypt_block (c, outbuf, inbuf);
+ do_decrypt_block (c, outbuf, inbuf);
return /*burn_stack*/ (2*8);
}
@@ -582,7 +639,7 @@ _gcry_cast5_ctr_enc(void *context, unsigned char *ctr, void *outbuf_arg,
/* Process data in 4 block chunks. */
while (nblocks >= 4)
{
- _gcry_cast5_amd64_ctr_enc(ctx, outbuf, inbuf, ctr);
+ cast5_amd64_ctr_enc(ctx, outbuf, inbuf, ctr);
nblocks -= 4;
outbuf += 4 * CAST5_BLOCKSIZE;
@@ -651,7 +708,7 @@ _gcry_cast5_cbc_dec(void *context, unsigned char *iv, void *outbuf_arg,
/* Process data in 4 block chunks. */
while (nblocks >= 4)
{
- _gcry_cast5_amd64_cbc_dec(ctx, outbuf, inbuf, iv);
+ cast5_amd64_cbc_dec(ctx, outbuf, inbuf, iv);
nblocks -= 4;
outbuf += 4 * CAST5_BLOCKSIZE;
@@ -710,7 +767,7 @@ _gcry_cast5_cfb_dec(void *context, unsigned char *iv, void *outbuf_arg,
/* Process data in 4 block chunks. */
while (nblocks >= 4)
{
- _gcry_cast5_amd64_cfb_dec(ctx, outbuf, inbuf, iv);
+ cast5_amd64_cfb_dec(ctx, outbuf, inbuf, iv);
nblocks -= 4;
outbuf += 4 * CAST5_BLOCKSIZE;
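
The wrappers added above exist because, in a mingw-w64 build, C code follows the Microsoft x64 calling convention while the hand-written assembly routines still expect their arguments in the System V registers; call_sysv_fn() bridges the two ABIs. Below is a minimal, self-contained sketch of the same technique for GCC/Clang on x86-64. The names sysv_add_words() and call_sysv_demo() are hypothetical and only mirror the constraint pattern of call_sysv_fn() in this patch; __attribute__((sysv_abi)) stands in for the assembly code.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the assembly routines: a function that always uses the
 * System V calling convention, even if the rest of the translation unit
 * targets the Win64 ABI. */
__attribute__((sysv_abi)) static void
sysv_add_words (void *out, const void *a, const void *b, const void *unused)
{
  (void) unused;
  *(uint64_t *) out = *(const uint64_t *) a + *(const uint64_t *) b;
}

/* Win64-side thunk: place the four arguments in the System V argument
 * registers (rdi, rsi, rdx, rcx), mark them as modified through the "+"
 * output constraints, and clobber r8-r11, which a System V callee may
 * trash.  XMM6-XMM15 are deliberately not saved; as in the patch, this is
 * only safe because the callee uses no vector instructions. */
static inline void
call_sysv_demo (const void *fn, const void *arg1, const void *arg2,
                const void *arg3, const void *arg4)
{
  asm volatile ("callq *%0\n\t"
                : "+a" (fn),
                  "+D" (arg1),
                  "+S" (arg2),
                  "+d" (arg3),
                  "+c" (arg4)
                :
                : "cc", "memory", "r8", "r9", "r10", "r11");
}

int
main (void)
{
  uint64_t a = 40, b = 2, sum = 0;

  call_sysv_demo (sysv_add_words, &sum, &a, &b, NULL);
  printf ("%llu\n", (unsigned long long) sum);   /* prints 42 */
  return 0;
}

On a System V target the thunk is redundant, since the attribute and the default ABI coincide; that is why the patch only routes calls through call_sysv_fn() when HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS is defined.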