From b068d6a7138292de0f5c5fa6c99f0b79d4e1e7f0 Mon Sep 17 00:00:00 2001 From: j_mayer Date: Sun, 7 Oct 2007 17:13:44 +0000 Subject: PowerPC target optimisations: make intensive use of always_inline. git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@3347 c046a42c-6fe2-441c-8c8c-71466251a162 --- hw/ppc.c | 28 +++---- hw/ppc405_uc.c | 2 +- hw/ppc_prep.c | 7 +- target-ppc/exec.h | 14 ++-- target-ppc/helper.c | 45 ++++++------ target-ppc/op_helper.c | 108 +++++++++++++-------------- target-ppc/op_helper.h | 30 ++++---- target-ppc/op_helper_mem.h | 9 ++- target-ppc/op_mem.h | 106 +++++++++++++++------------ target-ppc/translate.c | 179 +++++++++++++++++++++++---------------------- 10 files changed, 275 insertions(+), 253 deletions(-) diff --git a/hw/ppc.c b/hw/ppc.c index a9bfc47d39..f1722bb912 100644 --- a/hw/ppc.c +++ b/hw/ppc.c @@ -424,7 +424,8 @@ struct ppc_tb_t { void *opaque; }; -static inline uint64_t cpu_ppc_get_tb (ppc_tb_t *tb_env, int64_t tb_offset) +static always_inline uint64_t cpu_ppc_get_tb (ppc_tb_t *tb_env, + int64_t tb_offset) { /* TB time in tb periods */ return muldiv64(qemu_get_clock(vm_clock) + tb_env->tb_offset, @@ -446,7 +447,7 @@ uint32_t cpu_ppc_load_tbl (CPUState *env) return tb & 0xFFFFFFFF; } -static inline uint32_t _cpu_ppc_load_tbu (CPUState *env) +static always_inline uint32_t _cpu_ppc_load_tbu (CPUState *env) { ppc_tb_t *tb_env = env->tb_env; uint64_t tb; @@ -466,8 +467,9 @@ uint32_t cpu_ppc_load_tbu (CPUState *env) return _cpu_ppc_load_tbu(env); } -static inline void cpu_ppc_store_tb (ppc_tb_t *tb_env, int64_t *tb_offsetp, - uint64_t value) +static always_inline void cpu_ppc_store_tb (ppc_tb_t *tb_env, + int64_t *tb_offsetp, + uint64_t value) { *tb_offsetp = muldiv64(value, ticks_per_sec, tb_env->tb_freq) - qemu_get_clock(vm_clock); @@ -489,7 +491,7 @@ void cpu_ppc_store_tbl (CPUState *env, uint32_t value) cpu_ppc_store_tb(tb_env, &tb_env->tb_offset, tb | (uint64_t)value); } -static inline void _cpu_ppc_store_tbu (CPUState *env, uint32_t value) +static always_inline void _cpu_ppc_store_tbu (CPUState *env, uint32_t value) { ppc_tb_t *tb_env = env->tb_env; uint64_t tb; @@ -556,7 +558,8 @@ void cpu_ppc_store_atbu (CPUState *env, uint32_t value) ((uint64_t)value << 32) | tb); } -static inline uint32_t _cpu_ppc_load_decr (CPUState *env, uint64_t *next) +static always_inline uint32_t _cpu_ppc_load_decr (CPUState *env, + uint64_t *next) { ppc_tb_t *tb_env = env->tb_env; uint32_t decr; @@ -605,7 +608,7 @@ uint64_t cpu_ppc_load_purr (CPUState *env) /* When decrementer expires, * all we need to do is generate or queue a CPU exception */ -static inline void cpu_ppc_decr_excp (CPUState *env) +static always_inline void cpu_ppc_decr_excp (CPUState *env) { /* Raise it */ #ifdef PPC_DEBUG_TB @@ -616,7 +619,7 @@ static inline void cpu_ppc_decr_excp (CPUState *env) ppc_set_irq(env, PPC_INTERRUPT_DECR, 1); } -static inline void cpu_ppc_hdecr_excp (CPUState *env) +static always_inline void cpu_ppc_hdecr_excp (CPUState *env) { /* Raise it */ #ifdef PPC_DEBUG_TB @@ -657,9 +660,8 @@ static void __cpu_ppc_store_decr (CPUState *env, uint64_t *nextp, (*raise_excp)(env); } - -static inline void _cpu_ppc_store_decr (CPUState *env, uint32_t decr, - uint32_t value, int is_excp) +static always_inline void _cpu_ppc_store_decr (CPUState *env, uint32_t decr, + uint32_t value, int is_excp) { ppc_tb_t *tb_env = env->tb_env; @@ -678,8 +680,8 @@ static void cpu_ppc_decr_cb (void *opaque) } #if defined(TARGET_PPC64H) -static inline void _cpu_ppc_store_hdecr (CPUState *env, uint32_t hdecr, - 
uint32_t value, int is_excp) +static always_inline void _cpu_ppc_store_hdecr (CPUState *env, uint32_t hdecr, + uint32_t value, int is_excp) { ppc_tb_t *tb_env = env->tb_env; diff --git a/hw/ppc405_uc.c b/hw/ppc405_uc.c index 55f4bcc3d1..2857a1255e 100644 --- a/hw/ppc405_uc.c +++ b/hw/ppc405_uc.c @@ -463,7 +463,7 @@ static uint32_t sdram_bcr (target_phys_addr_t ram_base, return bcr; } -static inline target_phys_addr_t sdram_base (uint32_t bcr) +static always_inline target_phys_addr_t sdram_base (uint32_t bcr) { return bcr & 0xFF800000; } diff --git a/hw/ppc_prep.c b/hw/ppc_prep.c index 16d8915e5f..2c4b242030 100644 --- a/hw/ppc_prep.c +++ b/hw/ppc_prep.c @@ -107,7 +107,7 @@ static void _PPC_intack_write (void *opaque, // printf("%s: 0x%08x => 0x%08x\n", __func__, addr, value); } -static inline uint32_t _PPC_intack_read (target_phys_addr_t addr) +static always_inline uint32_t _PPC_intack_read (target_phys_addr_t addr) { uint32_t retval = 0; @@ -412,8 +412,9 @@ static uint32_t PREP_io_800_readb (void *opaque, uint32_t addr) return retval; } -static inline target_phys_addr_t prep_IO_address (sysctrl_t *sysctrl, - target_phys_addr_t addr) +static always_inline target_phys_addr_t prep_IO_address (sysctrl_t *sysctrl, + target_phys_addr_t + addr) { if (sysctrl->contiguous_map == 0) { /* 64 KB contiguous space for IOs */ diff --git a/target-ppc/exec.h b/target-ppc/exec.h index 8a54258271..0c53de42b1 100644 --- a/target-ppc/exec.h +++ b/target-ppc/exec.h @@ -68,23 +68,23 @@ register unsigned long T2 asm(AREG3); # define RETURN() __asm__ __volatile__("" : : : "memory"); #endif -static inline target_ulong rotl8 (target_ulong i, int n) +static always_inline target_ulong rotl8 (target_ulong i, int n) { return (((uint8_t)i << n) | ((uint8_t)i >> (8 - n))); } -static inline target_ulong rotl16 (target_ulong i, int n) +static always_inline target_ulong rotl16 (target_ulong i, int n) { return (((uint16_t)i << n) | ((uint16_t)i >> (16 - n))); } -static inline target_ulong rotl32 (target_ulong i, int n) +static always_inline target_ulong rotl32 (target_ulong i, int n) { return (((uint32_t)i << n) | ((uint32_t)i >> (32 - n))); } #if defined(TARGET_PPC64) -static inline target_ulong rotl64 (target_ulong i, int n) +static always_inline target_ulong rotl64 (target_ulong i, int n) { return (((uint64_t)i << n) | ((uint64_t)i >> (64 - n))); } @@ -103,18 +103,18 @@ int get_physical_address (CPUState *env, mmu_ctx_t *ctx, target_ulong vaddr, void ppc6xx_tlb_store (CPUState *env, target_ulong EPN, int way, int is_code, target_ulong pte0, target_ulong pte1); -static inline void env_to_regs (void) +static always_inline void env_to_regs (void) { } -static inline void regs_to_env (void) +static always_inline void regs_to_env (void) { } int cpu_ppc_handle_mmu_fault (CPUState *env, target_ulong address, int rw, int is_user, int is_softmmu); -static inline int cpu_halted (CPUState *env) +static always_inline int cpu_halted (CPUState *env) { if (!env->halted) return 0; diff --git a/target-ppc/helper.c b/target-ppc/helper.c index f1fe6806c6..205e35f49e 100644 --- a/target-ppc/helper.c +++ b/target-ppc/helper.c @@ -67,23 +67,23 @@ target_phys_addr_t cpu_get_phys_page_debug (CPUState *env, target_ulong addr) #else /* Common routines used by software and hardware TLBs emulation */ -static inline int pte_is_valid (target_ulong pte0) +static always_inline int pte_is_valid (target_ulong pte0) { return pte0 & 0x80000000 ? 
1 : 0; } -static inline void pte_invalidate (target_ulong *pte0) +static always_inline void pte_invalidate (target_ulong *pte0) { *pte0 &= ~0x80000000; } #if defined(TARGET_PPC64) -static inline int pte64_is_valid (target_ulong pte0) +static always_inline int pte64_is_valid (target_ulong pte0) { return pte0 & 0x0000000000000001ULL ? 1 : 0; } -static inline void pte64_invalidate (target_ulong *pte0) +static always_inline void pte64_invalidate (target_ulong *pte0) { *pte0 &= ~0x0000000000000001ULL; } @@ -96,9 +96,9 @@ static inline void pte64_invalidate (target_ulong *pte0) #define PTE64_CHECK_MASK (TARGET_PAGE_MASK | 0x7F) #endif -static inline int _pte_check (mmu_ctx_t *ctx, int is_64b, - target_ulong pte0, target_ulong pte1, - int h, int rw) +static always_inline int _pte_check (mmu_ctx_t *ctx, int is_64b, + target_ulong pte0, target_ulong pte1, + int h, int rw) { target_ulong ptem, mmask; int access, ret, pteh, ptev; @@ -258,9 +258,10 @@ static void ppc6xx_tlb_invalidate_all (CPUState *env) tlb_flush(env, 1); } -static inline void __ppc6xx_tlb_invalidate_virt (CPUState *env, - target_ulong eaddr, - int is_code, int match_epn) +static always_inline void __ppc6xx_tlb_invalidate_virt (CPUState *env, + target_ulong eaddr, + int is_code, + int match_epn) { #if !defined(FLUSH_ALL_TLBS) ppc6xx_tlb_t *tlb; @@ -487,7 +488,7 @@ static int get_bat (CPUState *env, mmu_ctx_t *ctx, } /* PTE table lookup */ -static inline int _find_pte (mmu_ctx_t *ctx, int is_64b, int h, int rw) +static always_inline int _find_pte (mmu_ctx_t *ctx, int is_64b, int h, int rw) { target_ulong base, pte0, pte1; int i, good = -1; @@ -588,7 +589,8 @@ static int find_pte64 (mmu_ctx_t *ctx, int h, int rw) } #endif -static inline int find_pte (CPUState *env, mmu_ctx_t *ctx, int h, int rw) +static always_inline int find_pte (CPUState *env, mmu_ctx_t *ctx, + int h, int rw) { #if defined(TARGET_PPC64) if (env->mmu_model == POWERPC_MMU_64B) @@ -720,10 +722,10 @@ void ppc_store_slb (CPUPPCState *env, int slb_nr, target_ulong rs) #endif /* defined(TARGET_PPC64) */ /* Perform segment based translation */ -static inline target_phys_addr_t get_pgaddr (target_phys_addr_t sdr1, - int sdr_sh, - target_phys_addr_t hash, - target_phys_addr_t mask) +static always_inline target_phys_addr_t get_pgaddr (target_phys_addr_t sdr1, + int sdr_sh, + target_phys_addr_t hash, + target_phys_addr_t mask) { return (sdr1 & ((target_ulong)(-1ULL) << sdr_sh)) | (hash & mask); } @@ -1594,8 +1596,9 @@ int cpu_ppc_handle_mmu_fault (CPUState *env, target_ulong address, int rw, /*****************************************************************************/ /* BATs management */ #if !defined(FLUSH_ALL_TLBS) -static inline void do_invalidate_BAT (CPUPPCState *env, - target_ulong BATu, target_ulong mask) +static always_inline void do_invalidate_BAT (CPUPPCState *env, + target_ulong BATu, + target_ulong mask) { target_ulong base, end, page; @@ -1616,8 +1619,8 @@ static inline void do_invalidate_BAT (CPUPPCState *env, } #endif -static inline void dump_store_bat (CPUPPCState *env, char ID, int ul, int nr, - target_ulong value) +static always_inline void dump_store_bat (CPUPPCState *env, char ID, + int ul, int nr, target_ulong value) { #if defined (DEBUG_BATS) if (loglevel != 0) { @@ -1931,7 +1934,7 @@ void ppc_store_xer (CPUPPCState *env, target_ulong value) } /* Swap temporary saved registers with GPRs */ -static inline void swap_gpr_tgpr (CPUPPCState *env) +static always_inline void swap_gpr_tgpr (CPUPPCState *env) { ppc_gpr_t tmp; diff --git a/target-ppc/op_helper.c 
b/target-ppc/op_helper.c index 56d2af75d1..7a758f6c06 100644 --- a/target-ppc/op_helper.c +++ b/target-ppc/op_helper.c @@ -601,7 +601,7 @@ void do_srad (void) } #endif -static inline int popcnt (uint32_t val) +static always_inline int popcnt (uint32_t val) { int i; @@ -707,7 +707,7 @@ void do_fctidz (void) #endif -static inline void do_fri (int rounding_mode) +static always_inline void do_fri (int rounding_mode) { int curmode; @@ -1430,12 +1430,12 @@ static uint8_t hbrev[16] = { 0x1, 0x9, 0x5, 0xD, 0x3, 0xB, 0x7, 0xF, }; -static inline uint8_t byte_reverse (uint8_t val) +static always_inline uint8_t byte_reverse (uint8_t val) { return hbrev[val >> 4] | (hbrev[val & 0xF] << 4); } -static inline uint32_t word_reverse (uint32_t val) +static always_inline uint32_t word_reverse (uint32_t val) { return byte_reverse(val >> 24) | (byte_reverse(val >> 16) << 8) | (byte_reverse(val >> 8) << 16) | (byte_reverse(val) << 24); @@ -1468,7 +1468,7 @@ void do_ev##name (void) \ } /* Fixed-point vector arithmetic */ -static inline uint32_t _do_eabs (uint32_t val) +static always_inline uint32_t _do_eabs (uint32_t val) { if (val != 0x80000000) val &= ~0x80000000; @@ -1476,12 +1476,12 @@ static inline uint32_t _do_eabs (uint32_t val) return val; } -static inline uint32_t _do_eaddw (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_eaddw (uint32_t op1, uint32_t op2) { return op1 + op2; } -static inline int _do_ecntlsw (uint32_t val) +static always_inline int _do_ecntlsw (uint32_t val) { if (val & 0x80000000) return _do_cntlzw(~val); @@ -1489,12 +1489,12 @@ static inline int _do_ecntlsw (uint32_t val) return _do_cntlzw(val); } -static inline int _do_ecntlzw (uint32_t val) +static always_inline int _do_ecntlzw (uint32_t val) { return _do_cntlzw(val); } -static inline uint32_t _do_eneg (uint32_t val) +static always_inline uint32_t _do_eneg (uint32_t val) { if (val != 0x80000000) val ^= 0x80000000; @@ -1502,35 +1502,35 @@ static inline uint32_t _do_eneg (uint32_t val) return val; } -static inline uint32_t _do_erlw (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_erlw (uint32_t op1, uint32_t op2) { return rotl32(op1, op2); } -static inline uint32_t _do_erndw (uint32_t val) +static always_inline uint32_t _do_erndw (uint32_t val) { return (val + 0x000080000000) & 0xFFFF0000; } -static inline uint32_t _do_eslw (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_eslw (uint32_t op1, uint32_t op2) { /* No error here: 6 bits are used */ return op1 << (op2 & 0x3F); } -static inline int32_t _do_esrws (int32_t op1, uint32_t op2) +static always_inline int32_t _do_esrws (int32_t op1, uint32_t op2) { /* No error here: 6 bits are used */ return op1 >> (op2 & 0x3F); } -static inline uint32_t _do_esrwu (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_esrwu (uint32_t op1, uint32_t op2) { /* No error here: 6 bits are used */ return op1 >> (op2 & 0x3F); } -static inline uint32_t _do_esubfw (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_esubfw (uint32_t op1, uint32_t op2) { return op2 - op1; } @@ -1559,7 +1559,7 @@ DO_SPE_OP2(srwu); DO_SPE_OP2(subfw); /* evsel is a little bit more complicated... 
*/ -static inline uint32_t _do_esel (uint32_t op1, uint32_t op2, int n) +static always_inline uint32_t _do_esel (uint32_t op1, uint32_t op2, int n) { if (n) return op1; @@ -1582,31 +1582,31 @@ void do_ev##name (void) \ _do_e##name(T0_64, T1_64)); \ } -static inline uint32_t _do_evcmp_merge (int t0, int t1) +static always_inline uint32_t _do_evcmp_merge (int t0, int t1) { return (t0 << 3) | (t1 << 2) | ((t0 | t1) << 1) | (t0 & t1); } -static inline int _do_ecmpeq (uint32_t op1, uint32_t op2) +static always_inline int _do_ecmpeq (uint32_t op1, uint32_t op2) { return op1 == op2 ? 1 : 0; } -static inline int _do_ecmpgts (int32_t op1, int32_t op2) +static always_inline int _do_ecmpgts (int32_t op1, int32_t op2) { return op1 > op2 ? 1 : 0; } -static inline int _do_ecmpgtu (uint32_t op1, uint32_t op2) +static always_inline int _do_ecmpgtu (uint32_t op1, uint32_t op2) { return op1 > op2 ? 1 : 0; } -static inline int _do_ecmplts (int32_t op1, int32_t op2) +static always_inline int _do_ecmplts (int32_t op1, int32_t op2) { return op1 < op2 ? 1 : 0; } -static inline int _do_ecmpltu (uint32_t op1, uint32_t op2) +static always_inline int _do_ecmpltu (uint32_t op1, uint32_t op2) { return op1 < op2 ? 1 : 0; } @@ -1623,7 +1623,7 @@ DO_SPE_CMP(cmplts); DO_SPE_CMP(cmpltu); /* Single precision floating-point conversions from/to integer */ -static inline uint32_t _do_efscfsi (int32_t val) +static always_inline uint32_t _do_efscfsi (int32_t val) { union { uint32_t u; @@ -1635,7 +1635,7 @@ static inline uint32_t _do_efscfsi (int32_t val) return u.u; } -static inline uint32_t _do_efscfui (uint32_t val) +static always_inline uint32_t _do_efscfui (uint32_t val) { union { uint32_t u; @@ -1647,7 +1647,7 @@ static inline uint32_t _do_efscfui (uint32_t val) return u.u; } -static inline int32_t _do_efsctsi (uint32_t val) +static always_inline int32_t _do_efsctsi (uint32_t val) { union { int32_t u; @@ -1662,7 +1662,7 @@ static inline int32_t _do_efsctsi (uint32_t val) return float32_to_int32(u.f, &env->spe_status); } -static inline uint32_t _do_efsctui (uint32_t val) +static always_inline uint32_t _do_efsctui (uint32_t val) { union { int32_t u; @@ -1677,7 +1677,7 @@ static inline uint32_t _do_efsctui (uint32_t val) return float32_to_uint32(u.f, &env->spe_status); } -static inline int32_t _do_efsctsiz (uint32_t val) +static always_inline int32_t _do_efsctsiz (uint32_t val) { union { int32_t u; @@ -1692,7 +1692,7 @@ static inline int32_t _do_efsctsiz (uint32_t val) return float32_to_int32_round_to_zero(u.f, &env->spe_status); } -static inline uint32_t _do_efsctuiz (uint32_t val) +static always_inline uint32_t _do_efsctuiz (uint32_t val) { union { int32_t u; @@ -1738,7 +1738,7 @@ void do_efsctuiz (void) } /* Single precision floating-point conversion to/from fractional */ -static inline uint32_t _do_efscfsf (uint32_t val) +static always_inline uint32_t _do_efscfsf (uint32_t val) { union { uint32_t u; @@ -1753,7 +1753,7 @@ static inline uint32_t _do_efscfsf (uint32_t val) return u.u; } -static inline uint32_t _do_efscfuf (uint32_t val) +static always_inline uint32_t _do_efscfuf (uint32_t val) { union { uint32_t u; @@ -1768,7 +1768,7 @@ static inline uint32_t _do_efscfuf (uint32_t val) return u.u; } -static inline int32_t _do_efsctsf (uint32_t val) +static always_inline int32_t _do_efsctsf (uint32_t val) { union { int32_t u; @@ -1786,7 +1786,7 @@ static inline int32_t _do_efsctsf (uint32_t val) return float32_to_int32(u.f, &env->spe_status); } -static inline uint32_t _do_efsctuf (uint32_t val) +static always_inline uint32_t 
_do_efsctuf (uint32_t val) { union { int32_t u; @@ -1804,7 +1804,7 @@ static inline uint32_t _do_efsctuf (uint32_t val) return float32_to_uint32(u.f, &env->spe_status); } -static inline int32_t _do_efsctsfz (uint32_t val) +static always_inline int32_t _do_efsctsfz (uint32_t val) { union { int32_t u; @@ -1822,7 +1822,7 @@ static inline int32_t _do_efsctsfz (uint32_t val) return float32_to_int32_round_to_zero(u.f, &env->spe_status); } -static inline uint32_t _do_efsctufz (uint32_t val) +static always_inline uint32_t _do_efsctufz (uint32_t val) { union { int32_t u; @@ -1871,19 +1871,19 @@ void do_efsctufz (void) } /* Double precision floating point helpers */ -static inline int _do_efdcmplt (uint64_t op1, uint64_t op2) +static always_inline int _do_efdcmplt (uint64_t op1, uint64_t op2) { /* XXX: TODO: test special values (NaN, infinites, ...) */ return _do_efdtstlt(op1, op2); } -static inline int _do_efdcmpgt (uint64_t op1, uint64_t op2) +static always_inline int _do_efdcmpgt (uint64_t op1, uint64_t op2) { /* XXX: TODO: test special values (NaN, infinites, ...) */ return _do_efdtstgt(op1, op2); } -static inline int _do_efdcmpeq (uint64_t op1, uint64_t op2) +static always_inline int _do_efdcmpeq (uint64_t op1, uint64_t op2) { /* XXX: TODO: test special values (NaN, infinites, ...) */ return _do_efdtsteq(op1, op2); @@ -1905,7 +1905,7 @@ void do_efdcmpeq (void) } /* Double precision floating-point conversion to/from integer */ -static inline uint64_t _do_efdcfsi (int64_t val) +static always_inline uint64_t _do_efdcfsi (int64_t val) { union { uint64_t u; @@ -1917,7 +1917,7 @@ static inline uint64_t _do_efdcfsi (int64_t val) return u.u; } -static inline uint64_t _do_efdcfui (uint64_t val) +static always_inline uint64_t _do_efdcfui (uint64_t val) { union { uint64_t u; @@ -1929,7 +1929,7 @@ static inline uint64_t _do_efdcfui (uint64_t val) return u.u; } -static inline int64_t _do_efdctsi (uint64_t val) +static always_inline int64_t _do_efdctsi (uint64_t val) { union { int64_t u; @@ -1944,7 +1944,7 @@ static inline int64_t _do_efdctsi (uint64_t val) return float64_to_int64(u.f, &env->spe_status); } -static inline uint64_t _do_efdctui (uint64_t val) +static always_inline uint64_t _do_efdctui (uint64_t val) { union { int64_t u; @@ -1959,7 +1959,7 @@ static inline uint64_t _do_efdctui (uint64_t val) return float64_to_uint64(u.f, &env->spe_status); } -static inline int64_t _do_efdctsiz (uint64_t val) +static always_inline int64_t _do_efdctsiz (uint64_t val) { union { int64_t u; @@ -1974,7 +1974,7 @@ static inline int64_t _do_efdctsiz (uint64_t val) return float64_to_int64_round_to_zero(u.f, &env->spe_status); } -static inline uint64_t _do_efdctuiz (uint64_t val) +static always_inline uint64_t _do_efdctuiz (uint64_t val) { union { int64_t u; @@ -2020,7 +2020,7 @@ void do_efdctuiz (void) } /* Double precision floating-point conversion to/from fractional */ -static inline uint64_t _do_efdcfsf (int64_t val) +static always_inline uint64_t _do_efdcfsf (int64_t val) { union { uint64_t u; @@ -2035,7 +2035,7 @@ static inline uint64_t _do_efdcfsf (int64_t val) return u.u; } -static inline uint64_t _do_efdcfuf (uint64_t val) +static always_inline uint64_t _do_efdcfuf (uint64_t val) { union { uint64_t u; @@ -2050,7 +2050,7 @@ static inline uint64_t _do_efdcfuf (uint64_t val) return u.u; } -static inline int64_t _do_efdctsf (uint64_t val) +static always_inline int64_t _do_efdctsf (uint64_t val) { union { int64_t u; @@ -2068,7 +2068,7 @@ static inline int64_t _do_efdctsf (uint64_t val) return float64_to_int32(u.f, 
&env->spe_status); } -static inline uint64_t _do_efdctuf (uint64_t val) +static always_inline uint64_t _do_efdctuf (uint64_t val) { union { int64_t u; @@ -2086,7 +2086,7 @@ static inline uint64_t _do_efdctuf (uint64_t val) return float64_to_uint32(u.f, &env->spe_status); } -static inline int64_t _do_efdctsfz (uint64_t val) +static always_inline int64_t _do_efdctsfz (uint64_t val) { union { int64_t u; @@ -2104,7 +2104,7 @@ static inline int64_t _do_efdctsfz (uint64_t val) return float64_to_int32_round_to_zero(u.f, &env->spe_status); } -static inline uint64_t _do_efdctufz (uint64_t val) +static always_inline uint64_t _do_efdctufz (uint64_t val) { union { int64_t u; @@ -2153,7 +2153,7 @@ void do_efdctufz (void) } /* Floating point conversion between single and double precision */ -static inline uint32_t _do_efscfd (uint64_t val) +static always_inline uint32_t _do_efscfd (uint64_t val) { union { uint64_t u; @@ -2170,7 +2170,7 @@ static inline uint32_t _do_efscfd (uint64_t val) return u2.u; } -static inline uint64_t _do_efdcfs (uint32_t val) +static always_inline uint64_t _do_efdcfs (uint32_t val) { union { uint64_t u; @@ -2214,19 +2214,19 @@ DO_SPE_OP2(fsmul); DO_SPE_OP2(fsdiv); /* Single-precision floating-point comparisons */ -static inline int _do_efscmplt (uint32_t op1, uint32_t op2) +static always_inline int _do_efscmplt (uint32_t op1, uint32_t op2) { /* XXX: TODO: test special values (NaN, infinites, ...) */ return _do_efststlt(op1, op2); } -static inline int _do_efscmpgt (uint32_t op1, uint32_t op2) +static always_inline int _do_efscmpgt (uint32_t op1, uint32_t op2) { /* XXX: TODO: test special values (NaN, infinites, ...) */ return _do_efststgt(op1, op2); } -static inline int _do_efscmpeq (uint32_t op1, uint32_t op2) +static always_inline int _do_efscmpeq (uint32_t op1, uint32_t op2) { /* XXX: TODO: test special values (NaN, infinites, ...) 
*/ return _do_efststeq(op1, op2); diff --git a/target-ppc/op_helper.h b/target-ppc/op_helper.h index 65bee1ae72..4688bc2fa4 100644 --- a/target-ppc/op_helper.h +++ b/target-ppc/op_helper.h @@ -277,7 +277,7 @@ void do_evfsctuiz (void); /* Inlined helpers: used in micro-operation as well as helpers */ /* Generic fixed-point helpers */ -static inline int _do_cntlzw (uint32_t val) +static always_inline int _do_cntlzw (uint32_t val) { int cnt = 0; if (!(val & 0xFFFF0000UL)) { @@ -306,7 +306,7 @@ static inline int _do_cntlzw (uint32_t val) return cnt; } -static inline int _do_cntlzd (uint64_t val) +static always_inline int _do_cntlzd (uint64_t val) { int cnt = 0; #if HOST_LONG_BITS == 64 @@ -350,19 +350,19 @@ static inline int _do_cntlzd (uint64_t val) #if defined(TARGET_PPCEMB) /* SPE extension */ /* Single precision floating-point helpers */ -static inline uint32_t _do_efsabs (uint32_t val) +static always_inline uint32_t _do_efsabs (uint32_t val) { return val & ~0x80000000; } -static inline uint32_t _do_efsnabs (uint32_t val) +static always_inline uint32_t _do_efsnabs (uint32_t val) { return val | 0x80000000; } -static inline uint32_t _do_efsneg (uint32_t val) +static always_inline uint32_t _do_efsneg (uint32_t val) { return val ^ 0x80000000; } -static inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2) { union { uint32_t u; @@ -373,7 +373,7 @@ static inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2) u1.f = float32_add(u1.f, u2.f, &env->spe_status); return u1.u; } -static inline uint32_t _do_efssub (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_efssub (uint32_t op1, uint32_t op2) { union { uint32_t u; @@ -384,7 +384,7 @@ static inline uint32_t _do_efssub (uint32_t op1, uint32_t op2) u1.f = float32_sub(u1.f, u2.f, &env->spe_status); return u1.u; } -static inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2) { union { uint32_t u; @@ -395,7 +395,7 @@ static inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2) u1.f = float32_mul(u1.f, u2.f, &env->spe_status); return u1.u; } -static inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2) +static always_inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2) { union { uint32_t u; @@ -407,7 +407,7 @@ static inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2) return u1.u; } -static inline int _do_efststlt (uint32_t op1, uint32_t op2) +static always_inline int _do_efststlt (uint32_t op1, uint32_t op2) { union { uint32_t u; @@ -417,7 +417,7 @@ static inline int _do_efststlt (uint32_t op1, uint32_t op2) u2.u = op2; return float32_lt(u1.f, u2.f, &env->spe_status) ? 1 : 0; } -static inline int _do_efststgt (uint32_t op1, uint32_t op2) +static always_inline int _do_efststgt (uint32_t op1, uint32_t op2) { union { uint32_t u; @@ -427,7 +427,7 @@ static inline int _do_efststgt (uint32_t op1, uint32_t op2) u2.u = op2; return float32_le(u1.f, u2.f, &env->spe_status) ? 0 : 1; } -static inline int _do_efststeq (uint32_t op1, uint32_t op2) +static always_inline int _do_efststeq (uint32_t op1, uint32_t op2) { union { uint32_t u; @@ -438,7 +438,7 @@ static inline int _do_efststeq (uint32_t op1, uint32_t op2) return float32_eq(u1.f, u2.f, &env->spe_status) ? 
1 : 0; } /* Double precision floating-point helpers */ -static inline int _do_efdtstlt (uint64_t op1, uint64_t op2) +static always_inline int _do_efdtstlt (uint64_t op1, uint64_t op2) { union { uint64_t u; @@ -448,7 +448,7 @@ static inline int _do_efdtstlt (uint64_t op1, uint64_t op2) u2.u = op2; return float64_lt(u1.f, u2.f, &env->spe_status) ? 1 : 0; } -static inline int _do_efdtstgt (uint64_t op1, uint64_t op2) +static always_inline int _do_efdtstgt (uint64_t op1, uint64_t op2) { union { uint64_t u; @@ -458,7 +458,7 @@ static inline int _do_efdtstgt (uint64_t op1, uint64_t op2) u2.u = op2; return float64_le(u1.f, u2.f, &env->spe_status) ? 0 : 1; } -static inline int _do_efdtsteq (uint64_t op1, uint64_t op2) +static always_inline int _do_efdtsteq (uint64_t op1, uint64_t op2) { union { uint64_t u; diff --git a/target-ppc/op_helper_mem.h b/target-ppc/op_helper_mem.h index 4b0bb841e5..a395e02626 100644 --- a/target-ppc/op_helper_mem.h +++ b/target-ppc/op_helper_mem.h @@ -19,14 +19,15 @@ */ /* Multiple word / string load and store */ -static inline target_ulong glue(ld32r, MEMSUFFIX) (target_ulong EA) +static always_inline target_ulong glue(ld32r, MEMSUFFIX) (target_ulong EA) { uint32_t tmp = glue(ldl, MEMSUFFIX)(EA); return ((tmp & 0xFF000000UL) >> 24) | ((tmp & 0x00FF0000UL) >> 8) | ((tmp & 0x0000FF00UL) << 8) | ((tmp & 0x000000FFUL) << 24); } -static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, target_ulong data) +static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA, + target_ulong data) { uint32_t tmp = ((data & 0xFF000000UL) >> 24) | ((data & 0x00FF0000UL) >> 8) | @@ -399,7 +400,7 @@ void glue(do_POWER2_lfq, MEMSUFFIX) (void) FT1 = glue(ldfq, MEMSUFFIX)((uint32_t)(T0 + 4)); } -static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) +static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) { union { double d; @@ -431,7 +432,7 @@ void glue(do_POWER2_stfq, MEMSUFFIX) (void) glue(stfq, MEMSUFFIX)((uint32_t)(T0 + 4), FT1); } -static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) +static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) { union { double d; diff --git a/target-ppc/op_mem.h b/target-ppc/op_mem.h index 16d8667777..60c043386a 100644 --- a/target-ppc/op_mem.h +++ b/target-ppc/op_mem.h @@ -18,19 +18,19 @@ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ -static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA) +static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA) { uint16_t tmp = glue(lduw, MEMSUFFIX)(EA); return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8); } -static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA) +static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA) { int16_t tmp = glue(lduw, MEMSUFFIX)(EA); return (int16_t)((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8); } -static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA) +static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA) { uint32_t tmp = glue(ldl, MEMSUFFIX)(EA); return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) | @@ -38,7 +38,7 @@ static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA) } #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB) -static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA) { uint64_t tmp = glue(ldq, MEMSUFFIX)(EA); return ((tmp & 0xFF00000000000000ULL) >> 56) | @@ -53,12 +53,12 @@ static inline uint64_t glue(ld64r, 
MEMSUFFIX) (target_ulong EA) #endif #if defined(TARGET_PPC64) -static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA) +static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA) { return (int32_t)glue(ldl, MEMSUFFIX)(EA); } -static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA) +static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA) { uint32_t tmp = glue(ldl, MEMSUFFIX)(EA); return (int32_t)((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) | @@ -66,13 +66,15 @@ static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA) } #endif -static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data) +static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA, + uint16_t data) { uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8); glue(stw, MEMSUFFIX)(EA, tmp); } -static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data) +static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA, + uint32_t data) { uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) | ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24); @@ -80,7 +82,8 @@ static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data) } #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB) -static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data) +static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA, + uint64_t data) { uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) | ((data & 0x00FF000000000000ULL) >> 40) | @@ -403,12 +406,12 @@ void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \ } #endif -static inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d) +static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d) { glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status)); } -static inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d) +static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d) { union { double d; @@ -429,7 +432,7 @@ PPC_STF_OP_64(fs, stfs); PPC_STF_OP_64(fiwx, stfiwx); #endif -static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) +static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) { union { double d; @@ -448,7 +451,7 @@ static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) glue(stfq, MEMSUFFIX)(EA, u.d); } -static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d) +static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d) { union { float f; @@ -463,7 +466,7 @@ static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d) glue(stfl, MEMSUFFIX)(EA, u.f); } -static inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d) +static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d) { union { double d; @@ -506,7 +509,7 @@ void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \ } #endif -static inline double glue(ldfs, MEMSUFFIX) (target_ulong EA) +static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA) { return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status); } @@ -518,7 +521,7 @@ PPC_LDF_OP_64(fd, ldfq); PPC_LDF_OP_64(fs, ldfs); #endif -static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) +static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) { union { double d; @@ -538,7 +541,7 @@ static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) return u.d; } -static inline double glue(ldfsr, MEMSUFFIX) 
(target_ulong EA) +static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA) { union { float f; @@ -1105,7 +1108,7 @@ PPC_SPE_ST_OP(dd, stq); PPC_SPE_LD_OP(dd_le, ld64r); PPC_SPE_ST_OP(dd_le, st64r); #endif -static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32; @@ -1113,13 +1116,14 @@ static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(dw, spe_ldw); -static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data) +static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(stl, MEMSUFFIX)(EA, data >> 32); glue(stl, MEMSUFFIX)(EA + 4, data); } PPC_SPE_ST_OP(dw, spe_stdw); -static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32; @@ -1127,14 +1131,14 @@ static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(dw_le, spe_ldw_le); -static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA, - uint64_t data) +static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(st32r, MEMSUFFIX)(EA, data >> 32); glue(st32r, MEMSUFFIX)(EA + 4, data); } PPC_SPE_ST_OP(dw_le, spe_stdw_le); -static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48; @@ -1144,7 +1148,8 @@ static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(dh, spe_ldh); -static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data) +static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(stw, MEMSUFFIX)(EA, data >> 48); glue(stw, MEMSUFFIX)(EA + 2, data >> 32); @@ -1152,7 +1157,7 @@ static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data) glue(stw, MEMSUFFIX)(EA + 6, data); } PPC_SPE_ST_OP(dh, spe_stdh); -static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48; @@ -1162,8 +1167,8 @@ static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(dh_le, spe_ldh_le); -static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA, - uint64_t data) +static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(st16r, MEMSUFFIX)(EA, data >> 48); glue(st16r, MEMSUFFIX)(EA + 2, data >> 32); @@ -1171,7 +1176,7 @@ static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA, glue(st16r, MEMSUFFIX)(EA + 6, data); } PPC_SPE_ST_OP(dh_le, spe_stdh_le); -static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48; @@ -1179,13 +1184,14 @@ static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(whe, spe_lwhe); -static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data) +static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(stw, MEMSUFFIX)(EA, data >> 48); 
glue(stw, MEMSUFFIX)(EA + 2, data >> 16); } PPC_SPE_ST_OP(whe, spe_stwhe); -static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48; @@ -1193,14 +1199,14 @@ static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(whe_le, spe_lwhe_le); -static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA, - uint64_t data) +static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(st16r, MEMSUFFIX)(EA, data >> 48); glue(st16r, MEMSUFFIX)(EA + 2, data >> 16); } PPC_SPE_ST_OP(whe_le, spe_stwhe_le); -static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32; @@ -1208,7 +1214,7 @@ static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(whou, spe_lwhou); -static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32; @@ -1216,13 +1222,14 @@ static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(whos, spe_lwhos); -static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data) +static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(stw, MEMSUFFIX)(EA, data >> 32); glue(stw, MEMSUFFIX)(EA + 2, data); } PPC_SPE_ST_OP(who, spe_stwho); -static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32; @@ -1230,7 +1237,7 @@ static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(whou_le, spe_lwhou_le); -static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA) { uint64_t ret; ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32; @@ -1238,55 +1245,57 @@ static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(whos_le, spe_lwhos_le); -static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA, - uint64_t data) +static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(st16r, MEMSUFFIX)(EA, data >> 32); glue(st16r, MEMSUFFIX)(EA + 2, data); } PPC_SPE_ST_OP(who_le, spe_stwho_le); #if !defined(TARGET_PPC64) -static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data) +static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(stl, MEMSUFFIX)(EA, data); } PPC_SPE_ST_OP(wwo, spe_stwwo); -static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA, - uint64_t data) +static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA, + uint64_t data) { glue(st32r, MEMSUFFIX)(EA, data); } PPC_SPE_ST_OP(wwo_le, spe_stwwo_le); #endif -static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA) { uint16_t tmp; tmp = glue(lduw, MEMSUFFIX)(EA); return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16); } PPC_SPE_LD_OP(h, spe_lh); 
-static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA) { uint16_t tmp; tmp = glue(ld16r, MEMSUFFIX)(EA); return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16); } PPC_SPE_LD_OP(h_le, spe_lh_le); -static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA) { uint32_t tmp; tmp = glue(ldl, MEMSUFFIX)(EA); return ((uint64_t)tmp << 32) | (uint64_t)tmp; } PPC_SPE_LD_OP(wwsplat, spe_lwwsplat); -static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA) +static always_inline +uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA) { uint32_t tmp; tmp = glue(ld32r, MEMSUFFIX)(EA); return ((uint64_t)tmp << 32) | (uint64_t)tmp; } PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le); -static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA) +static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA) { uint64_t ret; uint16_t tmp; @@ -1297,7 +1306,8 @@ static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA) return ret; } PPC_SPE_LD_OP(whsplat, spe_lwhsplat); -static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA) +static always_inline +uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA) { uint64_t ret; uint16_t tmp; diff --git a/target-ppc/translate.c b/target-ppc/translate.c index 9d51bf9b1a..59d032d26b 100644 --- a/target-ppc/translate.c +++ b/target-ppc/translate.c @@ -53,7 +53,7 @@ static uint32_t *gen_opparam_ptr; #include "gen-op.h" -static inline void gen_set_T0 (target_ulong val) +static always_inline void gen_set_T0 (target_ulong val) { #if defined(TARGET_PPC64) if (val >> 32) @@ -63,7 +63,7 @@ static inline void gen_set_T0 (target_ulong val) gen_op_set_T0(val); } -static inline void gen_set_T1 (target_ulong val) +static always_inline void gen_set_T1 (target_ulong val) { #if defined(TARGET_PPC64) if (val >> 32) @@ -78,7 +78,7 @@ static GenOpFunc *NAME ## _table [8] = { \ NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \ NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \ }; \ -static inline void func(int n) \ +static always_inline void func (int n) \ { \ NAME ## _table[n](); \ } @@ -90,7 +90,7 @@ NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \ NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \ NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \ }; \ -static inline void func(int n) \ +static always_inline void func (int n) \ { \ NAME ## _table[n](); \ } @@ -106,7 +106,7 @@ NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \ NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \ NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \ }; \ -static inline void func(int n) \ +static always_inline void func (int n) \ { \ NAME ## _table[n](); \ } @@ -121,7 +121,7 @@ GEN8(gen_op_store_T1_crf, gen_op_store_T1_crf_crf); GEN8(gen_op_load_fpscr_T0, gen_op_load_fpscr_T0_fpscr); GEN8(gen_op_store_T0_fpscr, gen_op_store_T0_fpscr_fpscr); GEN8(gen_op_clear_fpscr, gen_op_clear_fpscr_fpscr); -static inline void gen_op_store_T0_fpscri (int n, uint8_t param) +static always_inline void gen_op_store_T0_fpscri (int n, uint8_t param) { gen_op_set_T0(param); gen_op_store_T0_fpscr(n); @@ -187,7 +187,7 @@ struct opc_handler_t { #endif }; -static inline void gen_set_Rc0 (DisasContext *ctx) +static always_inline void gen_set_Rc0 (DisasContext *ctx) { #if defined(TARGET_PPC64) if (ctx->sf_mode) @@ -198,7 +198,7 @@ static inline void gen_set_Rc0 (DisasContext *ctx) gen_op_set_Rc0(); } -static 
inline void gen_update_nip (DisasContext *ctx, target_ulong nip) +static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip) { #if defined(TARGET_PPC64) if (ctx->sf_mode) @@ -236,14 +236,14 @@ GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0) GEN_EXCP(ctx, POWERPC_EXCP_APU, 0) /* Stop translation */ -static inline void GEN_STOP (DisasContext *ctx) +static always_inline void GEN_STOP (DisasContext *ctx) { gen_update_nip(ctx, ctx->nip); ctx->exception = POWERPC_EXCP_STOP; } /* No need to update nip here, as execution flow will change */ -static inline void GEN_SYNC (DisasContext *ctx) +static always_inline void GEN_SYNC (DisasContext *ctx) { ctx->exception = POWERPC_EXCP_SYNC; } @@ -267,13 +267,13 @@ typedef struct opcode_t { /*****************************************************************************/ /*** Instruction decoding ***/ #define EXTRACT_HELPER(name, shift, nb) \ -static inline uint32_t name (uint32_t opcode) \ +static always_inline uint32_t name (uint32_t opcode) \ { \ return (opcode >> (shift)) & ((1 << (nb)) - 1); \ } #define EXTRACT_SHELPER(name, shift, nb) \ -static inline int32_t name (uint32_t opcode) \ +static always_inline int32_t name (uint32_t opcode) \ { \ return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \ } @@ -304,7 +304,7 @@ EXTRACT_HELPER(crbA, 16, 5); EXTRACT_HELPER(crbB, 11, 5); /* SPR / TBL */ EXTRACT_HELPER(_SPR, 11, 10); -static inline uint32_t SPR (uint32_t opcode) +static always_inline uint32_t SPR (uint32_t opcode) { uint32_t sprn = _SPR(opcode); @@ -336,12 +336,12 @@ EXTRACT_HELPER(FPIMM, 20, 4); /* Displacement */ EXTRACT_SHELPER(d, 0, 16); /* Immediate address */ -static inline target_ulong LI (uint32_t opcode) +static always_inline target_ulong LI (uint32_t opcode) { return (opcode >> 0) & 0x03FFFFFC; } -static inline uint32_t BD (uint32_t opcode) +static always_inline uint32_t BD (uint32_t opcode) { return (opcode >> 0) & 0xFFFC; } @@ -354,7 +354,7 @@ EXTRACT_HELPER(AA, 1, 1); EXTRACT_HELPER(LK, 0, 1); /* Create a mask between and bits */ -static inline target_ulong MASK (uint32_t start, uint32_t end) +static always_inline target_ulong MASK (uint32_t start, uint32_t end) { target_ulong ret; @@ -694,7 +694,7 @@ __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type) #endif /* add add. addo addo. */ -static inline void gen_op_addo (void) +static always_inline void gen_op_addo (void) { gen_op_move_T2_T0(); gen_op_add(); @@ -702,7 +702,7 @@ static inline void gen_op_addo (void) } #if defined(TARGET_PPC64) #define gen_op_add_64 gen_op_add -static inline void gen_op_addo_64 (void) +static always_inline void gen_op_addo_64 (void) { gen_op_move_T2_T0(); gen_op_add(); @@ -711,13 +711,13 @@ static inline void gen_op_addo_64 (void) #endif GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER); /* addc addc. addco addco. 
*/ -static inline void gen_op_addc (void) +static always_inline void gen_op_addc (void) { gen_op_move_T2_T0(); gen_op_add(); gen_op_check_addc(); } -static inline void gen_op_addco (void) +static always_inline void gen_op_addco (void) { gen_op_move_T2_T0(); gen_op_add(); @@ -725,13 +725,13 @@ static inline void gen_op_addco (void) gen_op_check_addo(); } #if defined(TARGET_PPC64) -static inline void gen_op_addc_64 (void) +static always_inline void gen_op_addc_64 (void) { gen_op_move_T2_T0(); gen_op_add(); gen_op_check_addc_64(); } -static inline void gen_op_addco_64 (void) +static always_inline void gen_op_addco_64 (void) { gen_op_move_T2_T0(); gen_op_add(); @@ -741,14 +741,14 @@ static inline void gen_op_addco_64 (void) #endif GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER); /* adde adde. addeo addeo. */ -static inline void gen_op_addeo (void) +static always_inline void gen_op_addeo (void) { gen_op_move_T2_T0(); gen_op_adde(); gen_op_check_addo(); } #if defined(TARGET_PPC64) -static inline void gen_op_addeo_64 (void) +static always_inline void gen_op_addeo_64 (void) { gen_op_move_T2_T0(); gen_op_adde_64(); @@ -757,13 +757,13 @@ static inline void gen_op_addeo_64 (void) #endif GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER); /* addme addme. addmeo addmeo. */ -static inline void gen_op_addme (void) +static always_inline void gen_op_addme (void) { gen_op_move_T1_T0(); gen_op_add_me(); } #if defined(TARGET_PPC64) -static inline void gen_op_addme_64 (void) +static always_inline void gen_op_addme_64 (void) { gen_op_move_T1_T0(); gen_op_add_me_64(); @@ -771,13 +771,13 @@ static inline void gen_op_addme_64 (void) #endif GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER); /* addze addze. addzeo addzeo. */ -static inline void gen_op_addze (void) +static always_inline void gen_op_addze (void) { gen_op_move_T2_T0(); gen_op_add_ze(); gen_op_check_addc(); } -static inline void gen_op_addzeo (void) +static always_inline void gen_op_addzeo (void) { gen_op_move_T2_T0(); gen_op_add_ze(); @@ -785,13 +785,13 @@ static inline void gen_op_addzeo (void) gen_op_check_addo(); } #if defined(TARGET_PPC64) -static inline void gen_op_addze_64 (void) +static always_inline void gen_op_addze_64 (void) { gen_op_move_T2_T0(); gen_op_add_ze(); gen_op_check_addc_64(); } -static inline void gen_op_addzeo_64 (void) +static always_inline void gen_op_addzeo_64 (void) { gen_op_move_T2_T0(); gen_op_add_ze(); @@ -813,7 +813,7 @@ GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER); /* neg neg. nego nego. */ GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER); /* subf subf. subfo subfo. */ -static inline void gen_op_subfo (void) +static always_inline void gen_op_subfo (void) { gen_op_move_T2_T0(); gen_op_subf(); @@ -821,7 +821,7 @@ static inline void gen_op_subfo (void) } #if defined(TARGET_PPC64) #define gen_op_subf_64 gen_op_subf -static inline void gen_op_subfo_64 (void) +static always_inline void gen_op_subfo_64 (void) { gen_op_move_T2_T0(); gen_op_subf(); @@ -830,12 +830,12 @@ static inline void gen_op_subfo_64 (void) #endif GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER); /* subfc subfc. subfco subfco. 
*/ -static inline void gen_op_subfc (void) +static always_inline void gen_op_subfc (void) { gen_op_subf(); gen_op_check_subfc(); } -static inline void gen_op_subfco (void) +static always_inline void gen_op_subfco (void) { gen_op_move_T2_T0(); gen_op_subf(); @@ -843,12 +843,12 @@ static inline void gen_op_subfco (void) gen_op_check_subfo(); } #if defined(TARGET_PPC64) -static inline void gen_op_subfc_64 (void) +static always_inline void gen_op_subfc_64 (void) { gen_op_subf(); gen_op_check_subfc_64(); } -static inline void gen_op_subfco_64 (void) +static always_inline void gen_op_subfco_64 (void) { gen_op_move_T2_T0(); gen_op_subf(); @@ -858,7 +858,7 @@ static inline void gen_op_subfco_64 (void) #endif GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER); /* subfe subfe. subfeo subfeo. */ -static inline void gen_op_subfeo (void) +static always_inline void gen_op_subfeo (void) { gen_op_move_T2_T0(); gen_op_subfe(); @@ -866,7 +866,7 @@ static inline void gen_op_subfeo (void) } #if defined(TARGET_PPC64) #define gen_op_subfe_64 gen_op_subfe -static inline void gen_op_subfeo_64 (void) +static always_inline void gen_op_subfeo_64 (void) { gen_op_move_T2_T0(); gen_op_subfe_64(); @@ -1407,7 +1407,7 @@ GEN_HANDLER(name##3, opc1, opc2 | 0x11, 0xFF, 0x00000000, PPC_64B) \ gen_##name(ctx, 1, 1); \ } -static inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask) +static always_inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask) { if (mask >> 32) gen_op_andi_T0_64(mask >> 32, mask & 0xFFFFFFFF); @@ -1415,7 +1415,7 @@ static inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask) gen_op_andi_T0(mask); } -static inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask) +static always_inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask) { if (mask >> 32) gen_op_andi_T1_64(mask >> 32, mask & 0xFFFFFFFF); @@ -1423,8 +1423,8 @@ static inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask) gen_op_andi_T1(mask); } -static inline void gen_rldinm (DisasContext *ctx, uint32_t mb, uint32_t me, - uint32_t sh) +static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb, + uint32_t me, uint32_t sh) { gen_op_load_gpr_T0(rS(ctx->opcode)); if (likely(sh == 0)) { @@ -1453,7 +1453,7 @@ static inline void gen_rldinm (DisasContext *ctx, uint32_t mb, uint32_t me, gen_set_Rc0(ctx); } /* rldicl - rldicl. */ -static inline void gen_rldicl (DisasContext *ctx, int mbn, int shn) +static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn) { uint32_t sh, mb; @@ -1463,7 +1463,7 @@ static inline void gen_rldicl (DisasContext *ctx, int mbn, int shn) } GEN_PPC64_R4(rldicl, 0x1E, 0x00); /* rldicr - rldicr. */ -static inline void gen_rldicr (DisasContext *ctx, int men, int shn) +static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn) { uint32_t sh, me; @@ -1473,7 +1473,7 @@ static inline void gen_rldicr (DisasContext *ctx, int men, int shn) } GEN_PPC64_R4(rldicr, 0x1E, 0x02); /* rldic - rldic. 
 */
-static inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
+static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
 {
     uint32_t sh, mb;
 
@@ -1483,7 +1483,8 @@ static inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
 }
 GEN_PPC64_R4(rldic, 0x1E, 0x04);
 
-static inline void gen_rldnm (DisasContext *ctx, uint32_t mb, uint32_t me)
+static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
+                                     uint32_t me)
 {
     gen_op_load_gpr_T0(rS(ctx->opcode));
     gen_op_load_gpr_T1(rB(ctx->opcode));
@@ -1497,7 +1498,7 @@ static inline void gen_rldnm (DisasContext *ctx, uint32_t mb, uint32_t me)
 }
 
 /* rldcl - rldcl. */
-static inline void gen_rldcl (DisasContext *ctx, int mbn)
+static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
 {
     uint32_t mb;
 
@@ -1506,7 +1507,7 @@ static inline void gen_rldcl (DisasContext *ctx, int mbn)
 }
 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
 /* rldcr - rldcr. */
-static inline void gen_rldcr (DisasContext *ctx, int men)
+static always_inline void gen_rldcr (DisasContext *ctx, int men)
 {
     uint32_t me;
 
@@ -1515,7 +1516,7 @@ static inline void gen_rldcr (DisasContext *ctx, int men)
 }
 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
 /* rldimi - rldimi. */
-static inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
+static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
 {
     uint64_t mask;
     uint32_t sh, mb;
@@ -1583,7 +1584,7 @@ __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B);
 /* srad & srad. */
 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B);
 /* sradi & sradi. */
-static inline void gen_sradi (DisasContext *ctx, int n)
+static always_inline void gen_sradi (DisasContext *ctx, int n)
 {
     uint64_t mask;
     int sh, mb, me;
@@ -1937,7 +1938,8 @@ GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
 
 /*** Addressing modes ***/
 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
-static inline void gen_addr_imm_index (DisasContext *ctx, target_long maskl)
+static always_inline void gen_addr_imm_index (DisasContext *ctx,
+                                              target_long maskl)
 {
     target_long simm = SIMM(ctx->opcode);
 
@@ -1954,7 +1956,7 @@ static inline void gen_addr_imm_index (DisasContext *ctx, target_long maskl)
 #endif
 }
 
-static inline void gen_addr_reg_index (DisasContext *ctx)
+static always_inline void gen_addr_reg_index (DisasContext *ctx)
 {
     if (rA(ctx->opcode) == 0) {
         gen_op_load_gpr_T0(rB(ctx->opcode));
@@ -1968,7 +1970,7 @@ static inline void gen_addr_reg_index (DisasContext *ctx)
 #endif
 }
 
-static inline void gen_addr_register (DisasContext *ctx)
+static always_inline void gen_addr_register (DisasContext *ctx)
 {
     if (rA(ctx->opcode) == 0) {
         gen_op_reset_T0();
@@ -2964,7 +2966,8 @@ OP_ST_TABLE(fiwx);
 GEN_STXF(fiwx, 0x17, 0x1E, PPC_FLOAT_STFIWX);
 
 /*** Branch ***/
-static inline void gen_goto_tb (DisasContext *ctx, int n, target_ulong dest)
+static always_inline void gen_goto_tb (DisasContext *ctx, int n,
+                                       target_ulong dest)
 {
     TranslationBlock *tb;
     tb = ctx->tb;
@@ -2999,7 +3002,7 @@ static inline void gen_goto_tb (DisasContext *ctx, int n, target_ulong dest)
     }
 }
 
-static inline void gen_setlr (DisasContext *ctx, target_ulong nip)
+static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
 {
 #if defined(TARGET_PPC64)
     if (ctx->sf_mode != 0 && (nip >> 32))
@@ -3039,7 +3042,7 @@ GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
 #define BCOND_LR 1
 #define BCOND_CTR 2
 
-static inline void gen_bcond (DisasContext *ctx, int type)
+static always_inline void gen_bcond (DisasContext *ctx, int type)
 {
     target_ulong target = 0;
     target_ulong li;
@@ -3399,7 +3402,7 @@ static void spr_noaccess (void *opaque, int sprn)
 #endif
 
 /* mfspr */
-static inline void gen_op_mfspr (DisasContext *ctx)
+static always_inline void gen_op_mfspr (DisasContext *ctx)
 {
     void (*read_cb)(void *opaque, int sprn);
     uint32_t sprn = SPR(ctx->opcode);
@@ -3765,7 +3768,8 @@ static GenOpFunc *gen_op_dcbz[4][4] = {
 #endif
 #endif
 
-static inline void handler_dcbz (DisasContext *ctx, int dcache_line_size)
+static always_inline void handler_dcbz (DisasContext *ctx,
+                                        int dcache_line_size)
 {
     int n;
 
@@ -4913,8 +4917,9 @@ GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_BOOKE_EXT)
 }
 
 /* All 405 MAC instructions are translated here */
-static inline void gen_405_mulladd_insn (DisasContext *ctx, int opc2, int opc3,
-                                         int ra, int rb, int rt, int Rc)
+static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
+                                                int opc2, int opc3,
+                                                int ra, int rb, int rt, int Rc)
 {
     gen_op_load_gpr_T0(ra);
     gen_op_load_gpr_T1(rb);
@@ -5551,13 +5556,13 @@ GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
 }
 
 /* Handler for undefined SPE opcodes */
-static inline void gen_speundef (DisasContext *ctx)
+static always_inline void gen_speundef (DisasContext *ctx)
 {
     GEN_EXCP_INVAL(ctx);
 }
 
 /* SPE load and stores */
-static inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
+static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
 {
     target_long simm = rB(ctx->opcode);
 
@@ -5678,7 +5683,7 @@ static GenOpFunc *gen_op_spe_st##name[] = { \
 #endif /* defined(CONFIG_USER_ONLY) */
 
 #define GEN_SPE_LD(name, sh) \
-static inline void gen_evl##name (DisasContext *ctx) \
+static always_inline void gen_evl##name (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5690,7 +5695,7 @@ static inline void gen_evl##name (DisasContext *ctx) \
 }
 
 #define GEN_SPE_LDX(name) \
-static inline void gen_evl##name##x (DisasContext *ctx) \
+static always_inline void gen_evl##name##x (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5707,7 +5712,7 @@ GEN_SPE_LD(name, sh); \
 GEN_SPE_LDX(name)
 
 #define GEN_SPE_ST(name, sh) \
-static inline void gen_evst##name (DisasContext *ctx) \
+static always_inline void gen_evst##name (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5719,7 +5724,7 @@ static inline void gen_evst##name (DisasContext *ctx) \
 }
 
 #define GEN_SPE_STX(name) \
-static inline void gen_evst##name##x (DisasContext *ctx) \
+static always_inline void gen_evst##name##x (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5741,7 +5746,7 @@ GEN_SPEOP_ST(name, sh)
 
 /* SPE arithmetic and logic */
 #define GEN_SPEOP_ARITH2(name) \
-static inline void gen_##name (DisasContext *ctx) \
+static always_inline void gen_##name (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5754,7 +5759,7 @@ static inline void gen_##name (DisasContext *ctx) \
 }
 
 #define GEN_SPEOP_ARITH1(name) \
-static inline void gen_##name (DisasContext *ctx) \
+static always_inline void gen_##name (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5766,7 +5771,7 @@ static inline void gen_##name (DisasContext *ctx) \
 }
 
 #define GEN_SPEOP_COMP(name) \
-static inline void gen_##name (DisasContext *ctx) \
+static always_inline void gen_##name (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5806,7 +5811,7 @@ GEN_SPEOP_ARITH1(evextsh);
 GEN_SPEOP_ARITH1(evrndw);
 GEN_SPEOP_ARITH1(evcntlzw);
 GEN_SPEOP_ARITH1(evcntlsw);
-static inline void gen_brinc (DisasContext *ctx)
+static always_inline void gen_brinc (DisasContext *ctx)
 {
     /* Note: brinc is usable even if SPE is disabled */
     gen_op_load_gpr64_T0(rA(ctx->opcode));
@@ -5816,7 +5821,7 @@ static inline void gen_brinc (DisasContext *ctx)
 }
 
 #define GEN_SPEOP_ARITH_IMM2(name) \
-static inline void gen_##name##i (DisasContext *ctx) \
+static always_inline void gen_##name##i (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5829,7 +5834,7 @@ static inline void gen_##name##i (DisasContext *ctx) \
 }
 
 #define GEN_SPEOP_LOGIC_IMM2(name) \
-static inline void gen_##name##i (DisasContext *ctx) \
+static always_inline void gen_##name##i (DisasContext *ctx) \
 { \
     if (unlikely(!ctx->spe_enabled)) { \
         GEN_EXCP_NO_AP(ctx); \
@@ -5852,7 +5857,7 @@ GEN_SPEOP_LOGIC_IMM2(evsrws);
 #define gen_evsrwiu gen_evsrwui
 GEN_SPEOP_LOGIC_IMM2(evrlw);
 
-static inline void gen_evsplati (DisasContext *ctx)
+static always_inline void gen_evsplati (DisasContext *ctx)
 {
     int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27;
 
@@ -5860,7 +5865,7 @@ static inline void gen_evsplati (DisasContext *ctx)
     gen_op_store_T0_gpr64(rD(ctx->opcode));
 }
 
-static inline void gen_evsplatfi (DisasContext *ctx)
+static always_inline void gen_evsplatfi (DisasContext *ctx)
 {
     uint32_t imm = rA(ctx->opcode) << 27;
 
@@ -5901,7 +5906,7 @@ GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
 
-static inline void gen_evsel (DisasContext *ctx)
+static always_inline void gen_evsel (DisasContext *ctx)
 {
     if (unlikely(!ctx->spe_enabled)) {
         GEN_EXCP_NO_AP(ctx);
@@ -5991,13 +5996,13 @@ GEN_SPEOP_ST(who, 2);
 #endif
 #endif
 #define _GEN_OP_SPE_STWWE(suffix) \
-static inline void gen_op_spe_stwwe_##suffix (void) \
+static always_inline void gen_op_spe_stwwe_##suffix (void) \
 { \
     gen_op_srli32_T1_64(); \
     gen_op_spe_stwwo_##suffix(); \
 }
 #define _GEN_OP_SPE_STWWE_LE(suffix) \
-static inline void gen_op_spe_stwwe_le_##suffix (void) \
+static always_inline void gen_op_spe_stwwe_le_##suffix (void) \
 { \
     gen_op_srli32_T1_64(); \
     gen_op_spe_stwwo_le_##suffix(); \
@@ -6006,12 +6011,12 @@ static inline void gen_op_spe_stwwe_le_##suffix (void) \
 #define GEN_OP_SPE_STWWE(suffix) \
 _GEN_OP_SPE_STWWE(suffix); \
 _GEN_OP_SPE_STWWE_LE(suffix); \
-static inline void gen_op_spe_stwwe_64_##suffix (void) \
+static always_inline void gen_op_spe_stwwe_64_##suffix (void) \
 { \
     gen_op_srli32_T1_64(); \
     gen_op_spe_stwwo_64_##suffix(); \
 } \
-static inline void gen_op_spe_stwwe_le_64_##suffix (void) \
+static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \
 { \
     gen_op_srli32_T1_64(); \
     gen_op_spe_stwwo_le_64_##suffix(); \
@@ -6031,21 +6036,21 @@ GEN_SPEOP_ST(wwe, 2);
 GEN_SPEOP_ST(wwo, 2);
 
 #define GEN_SPE_LDSPLAT(name, op, suffix) \
-static inline void gen_op_spe_l##name##_##suffix (void) \
+static always_inline void gen_op_spe_l##name##_##suffix (void) \
 { \
     gen_op_##op##_##suffix(); \
     gen_op_splatw_T1_64(); \
 }
 
 #define GEN_OP_SPE_LHE(suffix) \
-static inline void gen_op_spe_lhe_##suffix (void) \
+static always_inline void gen_op_spe_lhe_##suffix (void) \
 { \
     gen_op_spe_lh_##suffix(); \
     gen_op_sli16_T1_64(); \
 }
 
 #define GEN_OP_SPE_LHX(suffix) \
-static inline void gen_op_spe_lhx_##suffix (void) \
+static always_inline void gen_op_spe_lhx_##suffix (void) \
 { \
     gen_op_spe_lh_##suffix(); \
     gen_op_extsh_T1_64(); \
@@ -6221,7 +6226,7 @@ GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
 
 /*** SPE floating-point extension ***/
 #define GEN_SPEFPUOP_CONV(name) \
-static inline void gen_##name (DisasContext *ctx) \
+static always_inline void gen_##name (DisasContext *ctx) \
 { \
     gen_op_load_gpr64_T0(rB(ctx->opcode)); \
     gen_op_##name(); \
@@ -6376,7 +6381,7 @@ GEN_OPCODE_MARK(end);
 
 /*****************************************************************************/
 /* Misc PowerPC helpers */
-static inline uint32_t load_xer (CPUState *env)
+static always_inline uint32_t load_xer (CPUState *env)
 {
     return (xer_so << XER_SO) |
            (xer_ov << XER_OV) |
@@ -6507,9 +6512,9 @@ void cpu_dump_statistics (CPUState *env, FILE*f,
 }
 
 /*****************************************************************************/
-static inline int gen_intermediate_code_internal (CPUState *env,
-                                                  TranslationBlock *tb,
-                                                  int search_pc)
+static always_inline int gen_intermediate_code_internal (CPUState *env,
+                                                         TranslationBlock *tb,
+                                                         int search_pc)
 {
     DisasContext ctx, *ctxp = &ctx;
     opc_handler_t **table, *handler;
-- 
cgit v1.2.1