author     Sergey Fedorov <serge.fdrv@gmail.com>       2016-04-08 19:48:12 +0300
committer  Richard Henderson <rth@twiddle.net>         2016-05-12 14:06:41 -1000
commit     5b053a4a28278bca606eeff7d1c0730df1b047e9 (patch)
tree       e9567828b03440c0d228edfc0aef296dafb34800
parent     f9c5b66f487a04d3747dc6997b1503f9258df945 (diff)
tcg: Clean up direct block chaining safety checks
We don't take care of direct jumps when the address mapping changes, so we must be sure to generate direct jumps only in such a way that they remain valid even if the address mapping changes. Luckily, we only allow a TB to execute if it was generated from pages that match the current mapping.

Document the tcg_gen_goto_tb() declaration and note the reason for the destination PC limitations.

Some targets with variable-length instructions allow a TB to straddle a page boundary. However, we make sure that both of a TB's pages match the current address mapping when looking up TBs, so it is safe to do direct jumps into both pages. Correct the checks for some of those targets.

Given that, we can safely patch a TB which spans two pages. Remove the unnecessary check in cpu_exec() and allow such TBs to be patched.

Signed-off-by: Sergey Fedorov <serge.fdrv@gmail.com>
Signed-off-by: Sergey Fedorov <sergey.fedorov@linaro.org>
Reviewed-by: Alex Bennée <alex.bennee@linaro.org>
Signed-off-by: Richard Henderson <rth@twiddle.net>
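As a reading aid, here is a minimal, self-contained sketch of the safety condition described above and enforced by the per-target gen_goto_tb() checks in the diff below. The function and parameter names are illustrative, not QEMU identifiers:

/*
 * Illustrative sketch only, not QEMU code: a direct jump may only target a
 * page the current TB was translated from, because only those pages are
 * known to match the current guest address mapping.
 */
#include <stdbool.h>
#include <stdint.h>

static bool direct_jump_is_safe(uint64_t tb_start_pc, /* pc of the TB's first insn    */
                                uint64_t cur_insn_pc, /* pc inside the TB's last page */
                                uint64_t dest_pc,     /* proposed jump destination    */
                                uint64_t page_mask)   /* e.g. TARGET_PAGE_MASK        */
{
    /* Destination lies in the TB's first page ... */
    if ((dest_pc & page_mask) == (tb_start_pc & page_mask)) {
        return true;
    }
    /* ... or in the second page of a TB that straddles a page boundary. */
    return (dest_pc & page_mask) == (cur_insn_pc & page_mask);
}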
-rw-r--r--  cpu-exec.c                 7
-rw-r--r--  target-arm/translate.c     3
-rw-r--r--  target-cris/translate.c    4
-rw-r--r--  target-i386/translate.c    2
-rw-r--r--  target-m68k/translate.c    2
-rw-r--r--  target-s390x/translate.c   2
-rw-r--r--  tcg/tcg-op.h              10
7 files changed, 20 insertions, 10 deletions
diff --git a/cpu-exec.c b/cpu-exec.c
index debc65ca69..f984dc71cb 100644
--- a/cpu-exec.c
+++ b/cpu-exec.c
@@ -508,11 +508,8 @@ int cpu_exec(CPUState *cpu)
next_tb = 0;
tcg_ctx.tb_ctx.tb_invalidated_flag = 0;
}
- /* see if we can patch the calling TB. When the TB
-    spans two pages, we cannot safely do a direct
-    jump. */
- if (next_tb != 0 && tb->page_addr[1] == -1
- && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
+ /* See if we can patch the calling TB. */
+ if (next_tb != 0 && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
tb_add_jump((TranslationBlock *)(next_tb & ~TB_EXIT_MASK),
next_tb & TB_EXIT_MASK, tb);
}
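The next_tb value above packs a TranslationBlock pointer together with an exit-slot index in its low bits, which is why the target diffs below call tcg_gen_exit_tb() with (uintptr_t)tb + n. A hedged sketch of that unpacking, assuming the slot index occupies the low two bits (the stand-in EXIT_MASK mirrors QEMU's TB_EXIT_MASK):

#include <stdint.h>

#define EXIT_MASK ((uintptr_t)3)  /* assumption: slot index lives in the low two bits */

static inline void *unpack_tb_ptr(uintptr_t next_tb)
{
    return (void *)(next_tb & ~EXIT_MASK);   /* the calling TB to patch */
}

static inline unsigned unpack_exit_slot(uintptr_t next_tb)
{
    return (unsigned)(next_tb & EXIT_MASK);  /* which goto_tb slot was taken */
}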
diff --git a/target-arm/translate.c b/target-arm/translate.c
index 940ec8d981..34196a8217 100644
--- a/target-arm/translate.c
+++ b/target-arm/translate.c
@@ -4054,7 +4054,8 @@ static inline void gen_goto_tb(DisasContext *s, int n, target_ulong dest)
TranslationBlock *tb;
tb = s->tb;
- if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
+ if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
+ ((s->pc - 1) & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
tcg_gen_goto_tb(n);
gen_set_pc_im(s, dest);
tcg_gen_exit_tb((uintptr_t)tb + n);
diff --git a/target-cris/translate.c b/target-cris/translate.c
index a73176c118..9c8ff8f230 100644
--- a/target-cris/translate.c
+++ b/target-cris/translate.c
@@ -524,7 +524,9 @@ static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
{
TranslationBlock *tb;
tb = dc->tb;
- if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
+
+ if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
+ (dc->ppc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
tcg_gen_goto_tb(n);
tcg_gen_movi_tl(env_pc, dest);
tcg_gen_exit_tb((uintptr_t)tb + n);
diff --git a/target-i386/translate.c b/target-i386/translate.c
index 3a32f65f8d..058d85a1ab 100644
--- a/target-i386/translate.c
+++ b/target-i386/translate.c
@@ -2094,7 +2094,7 @@ static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
tb = s->tb;
/* NOTE: we handle the case where the TB spans two pages here */
if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
- (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
+ (pc & TARGET_PAGE_MASK) == (s->pc_start & TARGET_PAGE_MASK)) {
/* jump to same page: we can use a direct jump */
tcg_gen_goto_tb(tb_num);
gen_jmp_im(eip);
diff --git a/target-m68k/translate.c b/target-m68k/translate.c
index 7560c3a808..e2ce6c615e 100644
--- a/target-m68k/translate.c
+++ b/target-m68k/translate.c
@@ -861,7 +861,7 @@ static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
if (unlikely(s->singlestep_enabled)) {
gen_exception(s, dest, EXCP_DEBUG);
} else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
- (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
+ (s->insn_pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
tcg_gen_goto_tb(n);
tcg_gen_movi_i32(QREG_PC, dest);
tcg_gen_exit_tb((uintptr_t)tb + n);
diff --git a/target-s390x/translate.c b/target-s390x/translate.c
index c871ef2bb3..c5179fe05d 100644
--- a/target-s390x/translate.c
+++ b/target-s390x/translate.c
@@ -610,7 +610,7 @@ static int use_goto_tb(DisasContext *s, uint64_t dest)
{
/* NOTE: we handle the case where the TB spans two pages here */
return (((dest & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK)
- || (dest & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))
+ || (dest & TARGET_PAGE_MASK) == (s->pc & TARGET_PAGE_MASK))
&& !s->singlestep_enabled
&& !(s->tb->cflags & CF_LAST_IO)
&& !(s->tb->flags & FLAG_MASK_PER));
diff --git a/tcg/tcg-op.h b/tcg/tcg-op.h
index c446d3dc72..ace39619ef 100644
--- a/tcg/tcg-op.h
+++ b/tcg/tcg-op.h
@@ -753,6 +753,16 @@ static inline void tcg_gen_exit_tb(uintptr_t val)
tcg_gen_op1i(INDEX_op_exit_tb, val);
}
+/**
+ * tcg_gen_goto_tb() - output goto_tb TCG operation
+ * @idx: Direct jump slot index (0 or 1)
+ *
+ * See tcg/README for more info about this TCG operation.
+ *
+ * NOTE: Direct jumps with goto_tb are only safe within the pages this TB
+ * resides in because we don't take care of direct jumps when address mapping
+ * changes, e.g. in tlb_flush().
+ */
void tcg_gen_goto_tb(unsigned idx);
#if TARGET_LONG_BITS == 32
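Putting it together, the front-end pattern that the new tcg_gen_goto_tb() documentation refers to looks roughly like the sketch below. It is not verbatim target code and only compiles inside a QEMU translator; gen_goto_tb_sketch and env_pc are illustrative names, and the page check corresponds to the conditions shown in the target diffs above:

/* Illustrative front-end pattern, not verbatim target code. */
static void gen_goto_tb_sketch(TranslationBlock *tb, unsigned slot, target_ulong dest)
{
    /* Only reach here when the destination page is one of the pages this
       TB was translated from (see the per-target checks above). */
    tcg_gen_goto_tb(slot);                  /* patchable direct-jump slot 0 or 1 */
    tcg_gen_movi_tl(env_pc, dest);          /* update the guest PC               */
    tcg_gen_exit_tb((uintptr_t)tb + slot);  /* return packed TB pointer + slot   */
}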