Diffstat (limited to 'tcg/optimize.c')
-rw-r--r--  tcg/optimize.c | 260
1 file changed, 52 insertions(+), 208 deletions(-)
diff --git a/tcg/optimize.c b/tcg/optimize.c
index 7eb5eb1c70..7e7f2b2020 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -31,14 +31,9 @@
#include "qemu-common.h"
#include "tcg-op.h"
-#if TCG_TARGET_REG_BITS == 64
#define CASE_OP_32_64(x) \
glue(glue(case INDEX_op_, x), _i32): \
glue(glue(case INDEX_op_, x), _i64)
-#else
-#define CASE_OP_32_64(x) \
- glue(glue(case INDEX_op_, x), _i32)
-#endif
typedef enum {
TCG_TEMP_UNDEF = 0,
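With every INDEX_op_*_i64 enum member now defined unconditionally, the macro can always emit both labels. A sketch of the expansion (assuming glue() is the usual token-pasting helper):

    /* CASE_OP_32_64(add) now expands, on every host, to:
     *     case INDEX_op_add_i32:
     *     case INDEX_op_add_i64:
     * Only backend support for the _i64 opcodes still varies. */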
@@ -92,81 +87,10 @@ static void reset_temp(TCGArg temp, int nb_temps, int nb_globals)
}
}
-static int op_bits(int op)
+static int op_bits(enum TCGOpcode op)
{
- switch (op) {
- case INDEX_op_mov_i32:
- case INDEX_op_add_i32:
- case INDEX_op_sub_i32:
- case INDEX_op_mul_i32:
- case INDEX_op_and_i32:
- case INDEX_op_or_i32:
- case INDEX_op_xor_i32:
- case INDEX_op_shl_i32:
- case INDEX_op_shr_i32:
- case INDEX_op_sar_i32:
-#ifdef TCG_TARGET_HAS_rot_i32
- case INDEX_op_rotl_i32:
- case INDEX_op_rotr_i32:
-#endif
-#ifdef TCG_TARGET_HAS_not_i32
- case INDEX_op_not_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext8s_i32
- case INDEX_op_ext8s_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext16s_i32
- case INDEX_op_ext16s_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext8u_i32
- case INDEX_op_ext8u_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext16u_i32
- case INDEX_op_ext16u_i32:
-#endif
- return 32;
-#if TCG_TARGET_REG_BITS == 64
- case INDEX_op_mov_i64:
- case INDEX_op_add_i64:
- case INDEX_op_sub_i64:
- case INDEX_op_mul_i64:
- case INDEX_op_and_i64:
- case INDEX_op_or_i64:
- case INDEX_op_xor_i64:
- case INDEX_op_shl_i64:
- case INDEX_op_shr_i64:
- case INDEX_op_sar_i64:
-#ifdef TCG_TARGET_HAS_rot_i64
- case INDEX_op_rotl_i64:
- case INDEX_op_rotr_i64:
-#endif
-#ifdef TCG_TARGET_HAS_not_i64
- case INDEX_op_not_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext8s_i64
- case INDEX_op_ext8s_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext16s_i64
- case INDEX_op_ext16s_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext32s_i64
- case INDEX_op_ext32s_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext8u_i64
- case INDEX_op_ext8u_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext16u_i64
- case INDEX_op_ext16u_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext32u_i64
- case INDEX_op_ext32u_i64:
-#endif
- return 64;
-#endif
- default:
- fprintf(stderr, "Unrecognized operation %d in op_bits.\n", op);
- tcg_abort();
- }
+ const TCGOpDef *def = &tcg_op_defs[op];
+ return def->flags & TCG_OPF_64BIT ? 64 : 32;
}
static int op_to_movi(int op)
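The table lookup replaces roughly seventy lines of switch: each opcode's width is already recorded in tcg_op_defs[] through the TCG_OPF_64BIT flag. A hypothetical sanity check, not part of the patch:

    assert(op_bits(INDEX_op_add_i32) == 32);  /* TCG_OPF_64BIT clear */
    assert(op_bits(INDEX_op_add_i64) == 64);  /* TCG_OPF_64BIT set   */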
@@ -174,10 +98,8 @@ static int op_to_movi(int op)
switch (op_bits(op)) {
case 32:
return INDEX_op_movi_i32;
-#if TCG_TARGET_REG_BITS == 64
case 64:
return INDEX_op_movi_i64;
-#endif
default:
fprintf(stderr, "op_to_movi: unexpected return value of "
"function op_bits.\n");
@@ -226,10 +148,8 @@ static int op_to_mov(int op)
switch (op_bits(op)) {
case 32:
return INDEX_op_mov_i32;
-#if TCG_TARGET_REG_BITS == 64
case 64:
return INDEX_op_mov_i64;
-#endif
default:
fprintf(stderr, "op_to_mov: unexpected return value of "
"function op_bits.\n");
@@ -261,124 +181,75 @@ static TCGArg do_constant_folding_2(int op, TCGArg x, TCGArg y)
case INDEX_op_shl_i32:
return (uint32_t)x << (uint32_t)y;
-#if TCG_TARGET_REG_BITS == 64
case INDEX_op_shl_i64:
return (uint64_t)x << (uint64_t)y;
-#endif
case INDEX_op_shr_i32:
return (uint32_t)x >> (uint32_t)y;
-#if TCG_TARGET_REG_BITS == 64
case INDEX_op_shr_i64:
return (uint64_t)x >> (uint64_t)y;
-#endif
case INDEX_op_sar_i32:
return (int32_t)x >> (int32_t)y;
-#if TCG_TARGET_REG_BITS == 64
case INDEX_op_sar_i64:
return (int64_t)x >> (int64_t)y;
-#endif
-#ifdef TCG_TARGET_HAS_rot_i32
case INDEX_op_rotr_i32:
-#if TCG_TARGET_REG_BITS == 64
- x &= 0xffffffff;
- y &= 0xffffffff;
-#endif
- x = (x << (32 - y)) | (x >> y);
+ x = ((uint32_t)x << (32 - y)) | ((uint32_t)x >> y);
return x;
-#endif
-#ifdef TCG_TARGET_HAS_rot_i64
-#if TCG_TARGET_REG_BITS == 64
case INDEX_op_rotr_i64:
- x = (x << (64 - y)) | (x >> y);
+ x = ((uint64_t)x << (64 - y)) | ((uint64_t)x >> y);
return x;
-#endif
-#endif
-#ifdef TCG_TARGET_HAS_rot_i32
case INDEX_op_rotl_i32:
-#if TCG_TARGET_REG_BITS == 64
- x &= 0xffffffff;
- y &= 0xffffffff;
-#endif
- x = (x << y) | (x >> (32 - y));
+ x = ((uint32_t)x << y) | ((uint32_t)x >> (32 - y));
return x;
-#endif
-#ifdef TCG_TARGET_HAS_rot_i64
-#if TCG_TARGET_REG_BITS == 64
case INDEX_op_rotl_i64:
- x = (x << y) | (x >> (64 - y));
+ x = ((uint64_t)x << y) | ((uint64_t)x >> (64 - y));
return x;
-#endif
-#endif
-
-#if defined(TCG_TARGET_HAS_not_i32) || defined(TCG_TARGET_HAS_not_i64)
-#ifdef TCG_TARGET_HAS_not_i32
- case INDEX_op_not_i32:
-#endif
-#ifdef TCG_TARGET_HAS_not_i64
- case INDEX_op_not_i64:
-#endif
+
+ CASE_OP_32_64(not):
return ~x;
-#endif
-
-#if defined(TCG_TARGET_HAS_ext8s_i32) || defined(TCG_TARGET_HAS_ext8s_i64)
-#ifdef TCG_TARGET_HAS_ext8s_i32
- case INDEX_op_ext8s_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext8s_i64
- case INDEX_op_ext8s_i64:
-#endif
+
+ CASE_OP_32_64(neg):
+ return -x;
+
+ CASE_OP_32_64(andc):
+ return x & ~y;
+
+ CASE_OP_32_64(orc):
+ return x | ~y;
+
+ CASE_OP_32_64(eqv):
+ return ~(x ^ y);
+
+ CASE_OP_32_64(nand):
+ return ~(x & y);
+
+ CASE_OP_32_64(nor):
+ return ~(x | y);
+
+ CASE_OP_32_64(ext8s):
return (int8_t)x;
-#endif
-
-#if defined(TCG_TARGET_HAS_ext16s_i32) || defined(TCG_TARGET_HAS_ext16s_i64)
-#ifdef TCG_TARGET_HAS_ext16s_i32
- case INDEX_op_ext16s_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext16s_i64
- case INDEX_op_ext16s_i64:
-#endif
+
+ CASE_OP_32_64(ext16s):
return (int16_t)x;
-#endif
-
-#if defined(TCG_TARGET_HAS_ext8u_i32) || defined(TCG_TARGET_HAS_ext8u_i64)
-#ifdef TCG_TARGET_HAS_ext8u_i32
- case INDEX_op_ext8u_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext8u_i64
- case INDEX_op_ext8u_i64:
-#endif
+
+ CASE_OP_32_64(ext8u):
return (uint8_t)x;
-#endif
-
-#if defined(TCG_TARGET_HAS_ext16u_i32) || defined(TCG_TARGET_HAS_ext16u_i64)
-#ifdef TCG_TARGET_HAS_ext16u_i32
- case INDEX_op_ext16u_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext16u_i64
- case INDEX_op_ext16u_i64:
-#endif
+
+ CASE_OP_32_64(ext16u):
return (uint16_t)x;
-#endif
-#if TCG_TARGET_REG_BITS == 64
-#ifdef TCG_TARGET_HAS_ext32s_i64
case INDEX_op_ext32s_i64:
return (int32_t)x;
-#endif
-#ifdef TCG_TARGET_HAS_ext32u_i64
case INDEX_op_ext32u_i64:
return (uint32_t)x;
-#endif
-#endif
default:
fprintf(stderr,
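A worked example of the simplified rotate folding above, with the uint32_t casts standing in for the old explicit 0xffffffff masking:

    /* rotr_i32, x = 0x80000000, y = 4:
     *   (uint32_t)x << (32 - 4)  ==  0           (high bits discarded)
     *   (uint32_t)x >> 4         ==  0x08000000
     *   result                   ==  0x08000000, a correct 4-bit right rotate. */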
@@ -390,11 +261,9 @@ static TCGArg do_constant_folding_2(int op, TCGArg x, TCGArg y)
static TCGArg do_constant_folding(int op, TCGArg x, TCGArg y)
{
TCGArg res = do_constant_folding_2(op, x, y);
-#if TCG_TARGET_REG_BITS == 64
if (op_bits(op) == 32) {
res &= 0xffffffff;
}
-#endif
return res;
}
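The 32-bit mask is now applied unconditionally; on a 32-bit host TCGArg is itself 32 bits wide, so the masking is a no-op there. For example, folding not_i32(0) on a 64-bit host:

    /* do_constant_folding_2 returns ~0 == 0xffffffffffffffff;
     * op_bits(INDEX_op_not_i32) == 32, so the result is masked to
     * 0xffffffff, exactly what the 32-bit op computes at runtime. */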
@@ -439,6 +308,9 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
CASE_OP_32_64(and):
CASE_OP_32_64(or):
CASE_OP_32_64(xor):
+ CASE_OP_32_64(eqv):
+ CASE_OP_32_64(nand):
+ CASE_OP_32_64(nor):
if (temps[args[1]].state == TCG_TEMP_CONST) {
tmp = args[1];
args[1] = args[2];
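eqv, nand and nor are commutative, so they join the canonicalization that swaps a constant first source into the second slot. Effect on a hypothetical op:

    /* Before: eqv_i32 t0, $0xff, t1    (args[1] is the constant)
     * After:  eqv_i32 t0, t1, $0xff    (constant moved to args[2])
     * Later passes can then assume a constant operand sits in args[2]. */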
@@ -456,14 +328,8 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
CASE_OP_32_64(shl):
CASE_OP_32_64(shr):
CASE_OP_32_64(sar):
-#ifdef TCG_TARGET_HAS_rot_i32
- case INDEX_op_rotl_i32:
- case INDEX_op_rotr_i32:
-#endif
-#ifdef TCG_TARGET_HAS_rot_i64
- case INDEX_op_rotl_i64:
- case INDEX_op_rotr_i64:
-#endif
+ CASE_OP_32_64(rotl):
+ CASE_OP_32_64(rotr):
if (temps[args[1]].state == TCG_TEMP_CONST) {
/* Proceed with possible constant folding. */
break;
@@ -544,34 +410,13 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
args += 2;
break;
CASE_OP_32_64(not):
-#ifdef TCG_TARGET_HAS_ext8s_i32
- case INDEX_op_ext8s_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext8s_i64
- case INDEX_op_ext8s_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext16s_i32
- case INDEX_op_ext16s_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext16s_i64
- case INDEX_op_ext16s_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext8u_i32
- case INDEX_op_ext8u_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext8u_i64
- case INDEX_op_ext8u_i64:
-#endif
-#ifdef TCG_TARGET_HAS_ext16u_i32
- case INDEX_op_ext16u_i32:
-#endif
-#ifdef TCG_TARGET_HAS_ext16u_i64
- case INDEX_op_ext16u_i64:
-#endif
-#if TCG_TARGET_REG_BITS == 64
+ CASE_OP_32_64(neg):
+ CASE_OP_32_64(ext8s):
+ CASE_OP_32_64(ext8u):
+ CASE_OP_32_64(ext16s):
+ CASE_OP_32_64(ext16u):
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
-#endif
if (temps[args[1]].state == TCG_TEMP_CONST) {
gen_opc_buf[op_index] = op_to_movi(op);
tmp = do_constant_folding(op, temps[args[1]].val, 0);
@@ -596,14 +441,13 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
CASE_OP_32_64(shl):
CASE_OP_32_64(shr):
CASE_OP_32_64(sar):
-#ifdef TCG_TARGET_HAS_rot_i32
- case INDEX_op_rotl_i32:
- case INDEX_op_rotr_i32:
-#endif
-#ifdef TCG_TARGET_HAS_rot_i64
- case INDEX_op_rotl_i64:
- case INDEX_op_rotr_i64:
-#endif
+ CASE_OP_32_64(rotl):
+ CASE_OP_32_64(rotr):
+ CASE_OP_32_64(andc):
+ CASE_OP_32_64(orc):
+ CASE_OP_32_64(eqv):
+ CASE_OP_32_64(nand):
+ CASE_OP_32_64(nor):
if (temps[args[1]].state == TCG_TEMP_CONST
&& temps[args[2]].state == TCG_TEMP_CONST) {
gen_opc_buf[op_index] = op_to_movi(op);
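With these entries, an op whose two sources are both constant collapses to a movi. A worked check of the eqv rule, values chosen for illustration:

    /* eqv_i32, x = 0xff00ff00, y = 0x0f0f0f0f:
     *   x ^ y    ==  0xf00ff00f
     *   ~(x ^ y) ==  0x0ff00ff0
     * so the op is rewritten as: movi_i32 dest, $0x0ff00ff0 */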