MIPS: Use WSBH/DSBH/DSHD on Loongson 3A
Signed-off-by: chenj <chenj@lemote.com>
Cc: linux-mips@linux-mips.org
Cc: chenhc@lemote.com
Patchwork: https://patchwork.linux-mips.org/patch/7542/
Patchwork: https://patchwork.linux-mips.org/patch/7550/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent 0f33be009b
commit 3c09bae43b
@@ -231,6 +231,16 @@
 #define cpu_has_clo_clz	cpu_has_mips_r
 #endif
 
+/*
+ * MIPS32 R2, MIPS64 R2, Loongson 3A and Octeon have WSBH.
+ * MIPS64 R2, Loongson 3A and Octeon have WSBH, DSBH and DSHD.
+ * This indicates the availability of WSBH and in case of 64 bit CPUs also
+ * DSBH and DSHD.
+ */
+#ifndef cpu_has_wsbh
+#define cpu_has_wsbh		cpu_has_mips_r2
+#endif
+
 #ifndef cpu_has_dsp
 #define cpu_has_dsp		(cpu_data[0].ases & MIPS_ASE_DSP)
 #endif
@@ -57,6 +57,7 @@
 #define cpu_has_vint		0
 #define cpu_has_veic		0
 #define cpu_hwrena_impl_bits	0xc0000000
+#define cpu_has_wsbh		1
 
 #define cpu_has_rixi		(cpu_data[0].cputype != CPU_CAVIUM_OCTEON)
 
@@ -59,4 +59,6 @@
 #define cpu_has_watch		1
 #define cpu_has_local_ebase	0
 
+#define cpu_has_wsbh		IS_ENABLED(CONFIG_CPU_LOONGSON3)
+
 #endif /* __ASM_MACH_LOONGSON_CPU_FEATURE_OVERRIDES_H */
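For readers less familiar with the MIPS feature-override headers: the two overrides above (Octeon forces cpu_has_wsbh to 1, Loongson keys it on CONFIG_CPU_LOONGSON3) are included before the #ifndef default added in cpu-features.h, so the cpu_has_mips_r2 default only applies on platforms without an override. A minimal standalone sketch of that pattern, using hypothetical macro names, not kernel code:

#include <stdio.h>

/* Stands in for a mach-specific cpu-feature-overrides.h that defines the macro. */
#define platform_has_wsbh 1

/* Stands in for the generic default; skipped because the override already exists. */
#ifndef platform_has_wsbh
#define platform_has_wsbh 0
#endif

int main(void)
{
	printf("platform_has_wsbh = %d\n", platform_has_wsbh);	/* prints 1 */
	return 0;
}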
@@ -13,12 +13,16 @@
 
 #define __SWAB_64_THRU_32__
 
-#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
+#if (defined(__mips_isa_rev) && (__mips_isa_rev >= 2)) || \
+    defined(_MIPS_ARCH_LOONGSON3A)
 
 static inline __attribute_const__ __u16 __arch_swab16(__u16 x)
 {
 	__asm__(
+	"	.set	push			\n"
+	"	.set	arch=mips32r2		\n"
 	"	wsbh	%0, %1			\n"
+	"	.set	pop			\n"
 	: "=r" (x)
 	: "r" (x));
 
@@ -29,8 +33,11 @@ static inline __attribute_const__ __u16 __arch_swab16(__u16 x)
 static inline __attribute_const__ __u32 __arch_swab32(__u32 x)
 {
 	__asm__(
+	"	.set	push			\n"
+	"	.set	arch=mips32r2		\n"
 	"	wsbh	%0, %1			\n"
 	"	rotr	%0, %0, 16		\n"
+	"	.set	pop			\n"
 	: "=r" (x)
 	: "r" (x));
 
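As a cross-check on the inline assembly above: wsbh swaps the two bytes inside each 16-bit halfword, and the following rotr by 16 turns that into a full 32-bit byte swap. A hedged plain-C model of that sequence (helper names are made up, not kernel code):

#include <stdint.h>

/* Model of "wsbh %0, %1": swap bytes within each 16-bit halfword. */
static uint32_t model_wsbh(uint32_t x)
{
	return ((x & 0x00ff00ffu) << 8) | ((x & 0xff00ff00u) >> 8);
}

/* Model of wsbh followed by "rotr %0, %0, 16": full 32-bit byte swap,
 * e.g. 0x11223344 -> 0x22114433 -> 0x44332211. */
static uint32_t model_swab32(uint32_t x)
{
	uint32_t t = model_wsbh(x);
	return (t << 16) | (t >> 16);
}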
@@ -46,8 +53,11 @@ static inline __attribute_const__ __u32 __arch_swab32(__u32 x)
 static inline __attribute_const__ __u64 __arch_swab64(__u64 x)
 {
 	__asm__(
-	"	dsbh	%0, %1\n"
-	"	dshd	%0, %0"
+	"	.set	push			\n"
+	"	.set	arch=mips64r2		\n"
+	"	dsbh	%0, %1			\n"
+	"	dshd	%0, %0			\n"
+	"	.set	pop			\n"
 	: "=r" (x)
 	: "r" (x));
 
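The 64-bit variant works the same way: dsbh swaps the bytes inside each of the four halfwords, and dshd then reverses the order of the halfwords within the doubleword, which together give a complete 64-bit byte swap. A hedged C model (names are made up):

#include <stdint.h>

/* Model of "dsbh %0, %1": swap bytes within each 16-bit halfword. */
static uint64_t model_dsbh(uint64_t x)
{
	return ((x & 0x00ff00ff00ff00ffull) << 8) |
	       ((x & 0xff00ff00ff00ff00ull) >> 8);
}

/* Model of "dshd %0, %0": reverse the order of the four halfwords. */
static uint64_t model_dshd(uint64_t x)
{
	return  (x << 48) |
	       ((x & 0x00000000ffff0000ull) << 16) |
	       ((x >> 16) & 0x00000000ffff0000ull) |
	        (x >> 48);
}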
@@ -55,5 +65,5 @@ static inline __attribute_const__ __u64 __arch_swab64(__u64 x)
 }
 #define __arch_swab64 __arch_swab64
 #endif /* __mips64 */
-#endif /* MIPS R2 or newer */
+#endif /* MIPS R2 or newer or Loongson 3A */
 #endif /* _ASM_SWAB_H */
@@ -277,9 +277,12 @@ LEAF(csum_partial)
 #endif
 
 	/* odd buffer alignment? */
-#ifdef CONFIG_CPU_MIPSR2
+#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_LOONGSON3)
+	.set	push
+	.set	arch=mips32r2
 	wsbh	v1, sum
 	movn	sum, v1, t7
+	.set	pop
 #else
 	beqz	t7, 1f			/* odd buffer alignment? */
 	 lui	v1, 0x00ff
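The csum_partial change above (and the identical one in the next hunk, which tests odd instead of t7) keeps the existing wsbh + movn fast path and merely opens it to Loongson 3A: when the buffer started on an odd address, each byte of the accumulated sum sits in the other half of its 16-bit lane, and swapping bytes within halfwords restores the correct ones'-complement sum without a branch. A hedged C model of just that fixup (not the whole checksum):

#include <stdint.h>

/* Model of "wsbh v1, sum; movn sum, v1, t7": byte-swap each halfword of the
 * 32-bit partial checksum, but only when the buffer alignment was odd. */
static uint32_t csum_odd_fixup(uint32_t sum, uint32_t was_odd)
{
	uint32_t swapped = ((sum & 0x00ff00ffu) << 8) | ((sum & 0xff00ff00u) >> 8);
	return was_odd ? swapped : sum;
}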
@@ -726,9 +729,12 @@ LEAF(csum_partial)
 	addu	sum, v1
 #endif
 
-#ifdef CONFIG_CPU_MIPSR2
+#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_LOONGSON3)
+	.set	push
+	.set	arch=mips32r2
 	wsbh	v1, sum
 	movn	sum, v1, odd
+	.set	pop
 #else
 	beqz	odd, 1f			/* odd buffer alignment? */
 	 lui	v1, 0x00ff
@@ -1263,7 +1263,7 @@ static int build_body(struct jit_ctx *ctx)
 			emit_half_load(r_A, r_skb, off, ctx);
#ifdef CONFIG_CPU_LITTLE_ENDIAN
 			/* This needs little endian fixup */
-			if (cpu_has_mips_r2) {
+			if (cpu_has_wsbh) {
 				/* R2 and later have the wsbh instruction */
 				emit_wsbh(r_A, r_A, ctx);
 			} else {
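In the BPF JIT, the gate for emitting wsbh changes from the ISA-revision test to the new cpu_has_wsbh feature bit, so Loongson 3A kernels also get the single-instruction fixup when a halfword loaded from the packet (network byte order) must be corrected on a little-endian host. A hedged C model of what that emitted fixup computes:

#include <stdint.h>

/* Model of the emitted wsbh applied to a zero-extended halfword load:
 * swap the two bytes so a big-endian field reads correctly on a
 * little-endian CPU. */
static uint16_t le_halfword_fixup(uint16_t v)
{
	return (uint16_t)((v << 8) | (v >> 8));
}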