path: root/tcg/aarch64
author     Pranith Kumar <bobby.prani@gmail.com>  2017-06-30 10:36:14 -0400
committer  Richard Henderson <rth@twiddle.net>    2017-07-09 21:10:23 -1000
commit     2acee8b2b5e6bba2935bb6ce5be92d0f0f9799cb (patch)
tree       a733a5f747e60eb961c9a0ab1c8b5fa1d88e129a /tcg/aarch64
parent     b68686bd4bfeb70040b4099df993dfa0b4f37b03 (diff)
download   qemu-2acee8b2b5e6bba2935bb6ce5be92d0f0f9799cb.tar.gz
tcg/aarch64: Enable indirect jump path using LDR (literal)

This patch enables the indirect jump path using an LDR (literal)
instruction. It will be interesting to test and see which performs
better among the two paths.

CC: Alex Bennée <alex.bennee@linaro.org>
Reviewed-by: Richard Henderson <rth@twiddle.net>
Signed-off-by: Pranith Kumar <bobby.prani@gmail.com>
Message-Id: <20170630143614.31059-3-bobby.prani@gmail.com>
Signed-off-by: Richard Henderson <rth@twiddle.net>
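
A standalone sketch of the encoding (illustration only, not part of the patch; the helper name ldr_literal is made up): the LDR (literal) word emitted by the new tcg_out_insn_3305() packs a signed 19-bit offset counted in 4-byte units, giving roughly +/-1 MiB of reach around the instruction, with the destination register in the low five bits.

    /* Sketch: assemble an aarch64 LDR (literal) instruction word. */
    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define I3305_LDR 0x58000000u             /* LDR (literal), 64-bit variant */

    static uint32_t ldr_literal(int64_t byte_offset, unsigned rt)
    {
        int64_t imm19 = byte_offset >> 2;     /* the field counts 32-bit words */
        assert((byte_offset & 3) == 0);       /* literal must be 4-byte aligned */
        assert(imm19 >= -(1 << 18) && imm19 < (1 << 18));   /* signed 19 bits */
        return I3305_LDR | ((uint32_t)imm19 & 0x7ffff) << 5 | (rt & 0x1f);
    }

    int main(void)
    {
        /* e.g. LDR x30, <literal 16 bytes ahead of this instruction> */
        printf("0x%08x\n", ldr_literal(16, 30));
        return 0;
    }
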
Diffstat (limited to 'tcg/aarch64')
-rw-r--r--  tcg/aarch64/tcg-target.inc.c  |  42
1 file changed, 28 insertions(+), 14 deletions(-)
diff --git a/tcg/aarch64/tcg-target.inc.c b/tcg/aarch64/tcg-target.inc.c
index a84422d633..04bc369a92 100644
--- a/tcg/aarch64/tcg-target.inc.c
+++ b/tcg/aarch64/tcg-target.inc.c
@@ -269,6 +269,8 @@ typedef enum {
     I3207_BLR = 0xd63f0000,
     I3207_RET = 0xd65f0000,
 
+    /* Load literal for loading the address at pc-relative offset */
+    I3305_LDR = 0x58000000,
     /* Load/store register. Described here as 3.3.12, but the helper
        that emits them can transform to 3.3.10 or 3.3.13. */
     I3312_STRB = 0x38000000 | LDST_ST << 22 | MO_8 << 30,
@@ -389,6 +391,11 @@ static inline uint32_t tcg_in32(TCGContext *s)
 #define tcg_out_insn(S, FMT, OP, ...) \
     glue(tcg_out_insn_,FMT)(S, glue(glue(glue(I,FMT),_),OP), ## __VA_ARGS__)
 
+static void tcg_out_insn_3305(TCGContext *s, AArch64Insn insn, int imm19, TCGReg rt)
+{
+    tcg_out32(s, insn | (imm19 & 0x7ffff) << 5 | rt);
+}
+
 static void tcg_out_insn_3201(TCGContext *s, AArch64Insn insn, TCGType ext,
                               TCGReg rt, int imm19)
 {
@@ -864,6 +871,8 @@ static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *target)
     }
 }
 
+#ifdef USE_DIRECT_JUMP
+
 void aarch64_tb_set_jmp_target(uintptr_t jmp_addr, uintptr_t addr)
 {
     tcg_insn_unit i1, i2;
@@ -889,6 +898,8 @@ void aarch64_tb_set_jmp_target(uintptr_t jmp_addr, uintptr_t addr)
     flush_icache_range(jmp_addr, jmp_addr + 8);
 }
 
+#endif
+
 static inline void tcg_out_goto_label(TCGContext *s, TCGLabel *l)
 {
     if (!l->has_value) {
@@ -1400,21 +1411,24 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         break;
 
     case INDEX_op_goto_tb:
-#ifndef USE_DIRECT_JUMP
-#error "USE_DIRECT_JUMP required for aarch64"
-#endif
-        /* consistency for USE_DIRECT_JUMP */
-        tcg_debug_assert(s->tb_jmp_insn_offset != NULL);
-        /* Ensure that ADRP+ADD are 8-byte aligned so that an atomic
-           write can be used to patch the target address. */
-        if ((uintptr_t)s->code_ptr & 7) {
-            tcg_out32(s, NOP);
+        if (s->tb_jmp_insn_offset != NULL) {
+            /* USE_DIRECT_JUMP */
+            /* Ensure that ADRP+ADD are 8-byte aligned so that an atomic
+               write can be used to patch the target address. */
+            if ((uintptr_t)s->code_ptr & 7) {
+                tcg_out32(s, NOP);
+            }
+            s->tb_jmp_insn_offset[a0] = tcg_current_code_size(s);
+            /* actual branch destination will be patched by
+               aarch64_tb_set_jmp_target later. */
+            tcg_out_insn(s, 3406, ADRP, TCG_REG_TMP, 0);
+            tcg_out_insn(s, 3401, ADDI, TCG_TYPE_I64, TCG_REG_TMP, TCG_REG_TMP, 0);
+        } else {
+            /* !USE_DIRECT_JUMP */
+            tcg_debug_assert(s->tb_jmp_target_addr != NULL);
+            intptr_t offset = tcg_pcrel_diff(s, (s->tb_jmp_target_addr + a0)) >> 2;
+            tcg_out_insn(s, 3305, LDR, offset, TCG_REG_TMP);
         }
-        s->tb_jmp_insn_offset[a0] = tcg_current_code_size(s);
-        /* actual branch destination will be patched by
-           aarch64_tb_set_jmp_target later. */
-        tcg_out_insn(s, 3406, ADRP, TCG_REG_TMP, 0);
-        tcg_out_insn(s, 3401, ADDI, TCG_TYPE_I64, TCG_REG_TMP, TCG_REG_TMP, 0);
         tcg_out_insn(s, 3207, BR, TCG_REG_TMP);
         s->tb_jmp_reset_offset[a0] = tcg_current_code_size(s);
         break;
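
As a closing note on the new path: the emitted pair is effectively LDR TMP, <pc-relative literal at &s->tb_jmp_target_addr[a0]> followed by BR TMP, and the >> 2 above converts the byte distance returned by tcg_pcrel_diff() into the 4-byte units of the imm19 field. The sketch below models the resulting control flow in plain C (names such as goto_tb and exit_stub are made up; the real code emits aarch64 machine code): the jump target lives in a data slot, so re-chaining a TB only needs a store to that slot rather than patching instructions.

    /* Model of the indirect goto_tb path in plain C (hypothetical names). */
    #include <stdint.h>
    #include <stdio.h>

    typedef void (*tb_entry)(void);

    static uintptr_t tb_jmp_target_addr[2];    /* one slot per goto_tb exit */

    static void exit_stub(void) { puts("unchained: back to the epilogue"); }
    static void next_tb(void)   { puts("chained: straight into the next TB"); }

    /* What the generated LDR (literal) + BR pair does for exit `slot`. */
    static void goto_tb(unsigned slot)
    {
        ((tb_entry)tb_jmp_target_addr[slot])();   /* load pointer, jump through it */
    }

    int main(void)
    {
        tb_jmp_target_addr[0] = (uintptr_t)exit_stub;   /* initial, unchained state */
        goto_tb(0);
        tb_jmp_target_addr[0] = (uintptr_t)next_tb;     /* "patch": store a new pointer */
        goto_tb(0);
        return 0;
    }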