Diffstat (limited to 'arch/loongarch/lib')
 arch/loongarch/lib/bswapdi.c    | 13
 arch/loongarch/lib/bswapsi.c    | 13
 arch/loongarch/lib/clear_user.S | 22
 arch/loongarch/lib/copy_user.S  | 28
 arch/loongarch/lib/dump_tlb.c   | 14
 arch/loongarch/lib/unaligned.S  | 72
 6 files changed, 104 insertions(+), 58 deletions(-)
diff --git a/arch/loongarch/lib/bswapdi.c b/arch/loongarch/lib/bswapdi.c
new file mode 100644
index 000000000000..88242dc7de17
--- /dev/null
+++ b/arch/loongarch/lib/bswapdi.c
@@ -0,0 +1,13 @@
+// SPDX-License-Identifier: GPL-2.0
+#include <linux/export.h>
+#include <linux/compiler.h>
+#include <uapi/linux/swab.h>
+
+/* To silence -Wmissing-prototypes. */
+unsigned long long __bswapdi2(unsigned long long u);
+
+unsigned long long notrace __bswapdi2(unsigned long long u)
+{
+ return ___constant_swab64(u);
+}
+EXPORT_SYMBOL(__bswapdi2);
diff --git a/arch/loongarch/lib/bswapsi.c b/arch/loongarch/lib/bswapsi.c
new file mode 100644
index 000000000000..2ed655497de5
--- /dev/null
+++ b/arch/loongarch/lib/bswapsi.c
@@ -0,0 +1,13 @@
+// SPDX-License-Identifier: GPL-2.0
+#include <linux/export.h>
+#include <linux/compiler.h>
+#include <uapi/linux/swab.h>
+
+/* To silence -Wmissing-prototypes. */
+unsigned int __bswapsi2(unsigned int u);
+
+unsigned int notrace __bswapsi2(unsigned int u)
+{
+ return ___constant_swab32(u);
+}
+EXPORT_SYMBOL(__bswapsi2);
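
Note: the two files above exist because GCC may lower __builtin_bswap32()/__builtin_bswap64() (and open-coded byte-swap patterns) into calls to the libgcc routines __bswapsi2/__bswapdi2, and the kernel does not link against libgcc, so it must provide the symbols itself. As a rough illustration only, here is a user-space sketch of the byte reversal that ___constant_swab32/___constant_swab64 expand to (the swab*_sketch names are made up for this note, not kernel identifiers):

#include <stdint.h>
#include <stdio.h>

/* Pure-C equivalent of ___constant_swab32: reverse the four bytes. */
static uint32_t swab32_sketch(uint32_t x)
{
	return ((x & 0x000000ffU) << 24) |
	       ((x & 0x0000ff00U) <<  8) |
	       ((x & 0x00ff0000U) >>  8) |
	       ((x & 0xff000000U) >> 24);
}

/* Pure-C equivalent of ___constant_swab64: swap halves, then bytes. */
static uint64_t swab64_sketch(uint64_t x)
{
	return ((uint64_t)swab32_sketch((uint32_t)x) << 32) |
	       (uint64_t)swab32_sketch((uint32_t)(x >> 32));
}

int main(void)
{
	printf("0x%08x\n", (unsigned int)swab32_sketch(0x12345678));	/* 0x78563412 */
	printf("0x%016llx\n", (unsigned long long)swab64_sketch(0x0123456789abcdefULL));
	return 0;
}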
diff --git a/arch/loongarch/lib/clear_user.S b/arch/loongarch/lib/clear_user.S
index 7a0db643b286..58c667dde882 100644
--- a/arch/loongarch/lib/clear_user.S
+++ b/arch/loongarch/lib/clear_user.S
@@ -13,11 +13,15 @@
#include <asm/unwind_hints.h>
SYM_FUNC_START(__clear_user)
+#ifdef CONFIG_32BIT
+ b __clear_user_generic
+#else
/*
* Some CPUs support hardware unaligned access
*/
ALTERNATIVE "b __clear_user_generic", \
"b __clear_user_fast", CPU_FEATURE_UAL
+#endif
SYM_FUNC_END(__clear_user)
EXPORT_SYMBOL(__clear_user)
@@ -29,19 +33,20 @@ EXPORT_SYMBOL(__clear_user)
* a1: size
*/
SYM_FUNC_START(__clear_user_generic)
- beqz a1, 2f
+ beqz a1, 2f
-1: st.b zero, a0, 0
- addi.d a0, a0, 1
- addi.d a1, a1, -1
- bgtz a1, 1b
+1: st.b zero, a0, 0
+ PTR_ADDI a0, a0, 1
+ PTR_ADDI a1, a1, -1
+ bgtz a1, 1b
-2: move a0, a1
- jr ra
+2: move a0, a1
+ jr ra
- _asm_extable 1b, 2b
+ _asm_extable 1b, 2b
SYM_FUNC_END(__clear_user_generic)
+#ifdef CONFIG_64BIT
/*
* unsigned long __clear_user_fast(void *addr, unsigned long size)
*
@@ -207,3 +212,4 @@ SYM_FUNC_START(__clear_user_fast)
SYM_FUNC_END(__clear_user_fast)
STACK_FRAME_NON_STANDARD __clear_user_fast
+#endif
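
Note: for readers following the asm above, here is a hedged C rendering of __clear_user_generic (hypothetical name, fragment only). The real routine touches user memory, where each st.b may fault; the _asm_extable entry redirects a faulting store to label 2, so the leftover count becomes the return value:

/*
 * Sketch of __clear_user_generic's loop. In plain C the return value
 * is always 0; in the real asm a fault exits the loop early via the
 * exception table, so the remaining count in a1 is returned instead.
 */
static unsigned long clear_user_generic_sketch(void *addr, unsigned long size)
{
	unsigned char *p = addr;

	while (size > 0) {
		*p = 0;		/* st.b zero, a0, 0 -- may fault on user memory */
		p++;		/* PTR_ADDI a0, a0, 1 */
		size--;		/* PTR_ADDI a1, a1, -1 */
	}
	return size;		/* move a0, a1: bytes NOT cleared */
}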
diff --git a/arch/loongarch/lib/copy_user.S b/arch/loongarch/lib/copy_user.S
index 095ce9181c6c..c7264b779f6e 100644
--- a/arch/loongarch/lib/copy_user.S
+++ b/arch/loongarch/lib/copy_user.S
@@ -13,11 +13,15 @@
#include <asm/unwind_hints.h>
SYM_FUNC_START(__copy_user)
+#ifdef CONFIG_32BIT
+ b __copy_user_generic
+#else
/*
* Some CPUs support hardware unaligned access
*/
ALTERNATIVE "b __copy_user_generic", \
"b __copy_user_fast", CPU_FEATURE_UAL
+#endif
SYM_FUNC_END(__copy_user)
EXPORT_SYMBOL(__copy_user)
@@ -30,22 +34,23 @@ EXPORT_SYMBOL(__copy_user)
* a2: n
*/
SYM_FUNC_START(__copy_user_generic)
- beqz a2, 3f
+ beqz a2, 3f
-1: ld.b t0, a1, 0
-2: st.b t0, a0, 0
- addi.d a0, a0, 1
- addi.d a1, a1, 1
- addi.d a2, a2, -1
- bgtz a2, 1b
+1: ld.b t0, a1, 0
+2: st.b t0, a0, 0
+ PTR_ADDI a0, a0, 1
+ PTR_ADDI a1, a1, 1
+ PTR_ADDI a2, a2, -1
+ bgtz a2, 1b
-3: move a0, a2
- jr ra
+3: move a0, a2
+ jr ra
- _asm_extable 1b, 3b
- _asm_extable 2b, 3b
+ _asm_extable 1b, 3b
+ _asm_extable 2b, 3b
SYM_FUNC_END(__copy_user_generic)
+#ifdef CONFIG_64BIT
/*
* unsigned long __copy_user_fast(void *to, const void *from, unsigned long n)
*
@@ -281,3 +286,4 @@ SYM_FUNC_START(__copy_user_fast)
SYM_FUNC_END(__copy_user_fast)
STACK_FRAME_NON_STANDARD __copy_user_fast
+#endif
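
Note: __copy_user_generic has the same shape as the clear_user sketch above, except that both the ld.b and the st.b can fault, which is why the asm carries two _asm_extable entries. A hedged C fragment (hypothetical name):

static unsigned long copy_user_generic_sketch(void *to, const void *from,
					      unsigned long n)
{
	unsigned char *d = to;
	const unsigned char *s = from;

	while (n > 0) {
		*d++ = *s++;	/* 1: ld.b t0, a1, 0 / 2: st.b t0, a0, 0 */
		n--;		/* PTR_ADDI a2, a2, -1 */
	}
	return n;		/* bytes NOT copied; nonzero only via the fault path */
}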
diff --git a/arch/loongarch/lib/dump_tlb.c b/arch/loongarch/lib/dump_tlb.c
index 0b886a6e260f..e1cdad7a676e 100644
--- a/arch/loongarch/lib/dump_tlb.c
+++ b/arch/loongarch/lib/dump_tlb.c
@@ -20,9 +20,9 @@ void dump_tlb_regs(void)
pr_info("Index : 0x%0x\n", read_csr_tlbidx());
pr_info("PageSize : 0x%0x\n", read_csr_pagesize());
- pr_info("EntryHi : 0x%0*lx\n", field, read_csr_entryhi());
- pr_info("EntryLo0 : 0x%0*lx\n", field, read_csr_entrylo0());
- pr_info("EntryLo1 : 0x%0*lx\n", field, read_csr_entrylo1());
+ pr_info("EntryHi : 0x%0*lx\n", field, (unsigned long)read_csr_entryhi());
+ pr_info("EntryLo0 : 0x%0*lx\n", field, (unsigned long)read_csr_entrylo0());
+ pr_info("EntryLo1 : 0x%0*lx\n", field, (unsigned long)read_csr_entrylo1());
}
static void dump_tlb(int first, int last)
@@ -73,12 +73,16 @@ static void dump_tlb(int first, int last)
vwidth, (entryhi & ~0x1fffUL), asidwidth, asid & asidmask);
/* NR/NX are in awkward places, so mask them off separately */
+#ifdef CONFIG_64BIT
pa = entrylo0 & ~(ENTRYLO_NR | ENTRYLO_NX);
+#endif
pa = pa & PAGE_MASK;
pr_cont("\n\t[");
+#ifdef CONFIG_64BIT
pr_cont("nr=%d nx=%d ",
(entrylo0 & ENTRYLO_NR) ? 1 : 0,
(entrylo0 & ENTRYLO_NX) ? 1 : 0);
+#endif
pr_cont("pa=0x%0*llx c=%d d=%d v=%d g=%d plv=%lld] [",
pwidth, pa, c0,
(entrylo0 & ENTRYLO_D) ? 1 : 0,
@@ -86,11 +90,15 @@ static void dump_tlb(int first, int last)
(entrylo0 & ENTRYLO_G) ? 1 : 0,
(entrylo0 & ENTRYLO_PLV) >> ENTRYLO_PLV_SHIFT);
/* NR/NX are in awkward places, so mask them off separately */
+#ifdef CONFIG_64BIT
pa = entrylo1 & ~(ENTRYLO_NR | ENTRYLO_NX);
+#endif
pa = pa & PAGE_MASK;
+#ifdef CONFIG_64BIT
pr_cont("nr=%d nx=%d ",
(entrylo1 & ENTRYLO_NR) ? 1 : 0,
(entrylo1 & ENTRYLO_NX) ? 1 : 0);
+#endif
pr_cont("pa=0x%0*llx c=%d d=%d v=%d g=%d plv=%lld]\n",
pwidth, pa, c1,
(entrylo1 & ENTRYLO_D) ? 1 : 0,
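
Note: the #ifdef CONFIG_64BIT pattern above reflects that the NR/NX (no-read/no-execute) bits sit in the high half of a 64-bit EntryLo value and have no counterpart in the 32-bit format. A minimal sketch of the masking order, with placeholder bit positions (the real ENTRYLO_NR/ENTRYLO_NX come from asm/loongarch.h):

#define SKETCH_PAGE_MASK	(~0xfffULL)	/* illustrative 4 KiB pages */
#ifdef CONFIG_64BIT
#define SKETCH_ENTRYLO_NR	(1ULL << 61)	/* placeholder position */
#define SKETCH_ENTRYLO_NX	(1ULL << 62)	/* placeholder position */
#endif

static unsigned long long entrylo_to_pa_sketch(unsigned long long entrylo)
{
	unsigned long long pa = entrylo;

#ifdef CONFIG_64BIT
	/* Strip the high permission bits first, then apply the page mask. */
	pa &= ~(SKETCH_ENTRYLO_NR | SKETCH_ENTRYLO_NX);
#endif
	return pa & SKETCH_PAGE_MASK;
}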
diff --git a/arch/loongarch/lib/unaligned.S b/arch/loongarch/lib/unaligned.S
index 185f82d85810..470c0bfa3463 100644
--- a/arch/loongarch/lib/unaligned.S
+++ b/arch/loongarch/lib/unaligned.S
@@ -24,35 +24,35 @@
* a3: sign
*/
SYM_FUNC_START(unaligned_read)
- beqz a2, 5f
+ beqz a2, 5f
- li.w t2, 0
- addi.d t0, a2, -1
- slli.d t1, t0, 3
- add.d a0, a0, t0
+ li.w t2, 0
+ LONG_ADDI t0, a2, -1
+ PTR_SLLI t1, t0, LONGLOG
+ PTR_ADD a0, a0, t0
- beqz a3, 2f
-1: ld.b t3, a0, 0
- b 3f
+ beqz a3, 2f
+1: ld.b t3, a0, 0
+ b 3f
-2: ld.bu t3, a0, 0
-3: sll.d t3, t3, t1
- or t2, t2, t3
- addi.d t1, t1, -8
- addi.d a0, a0, -1
- addi.d a2, a2, -1
- bgtz a2, 2b
-4: st.d t2, a1, 0
+2: ld.bu t3, a0, 0
+3: LONG_SLLV t3, t3, t1
+ or t2, t2, t3
+ LONG_ADDI t1, t1, -8
+ PTR_ADDI a0, a0, -1
+ PTR_ADDI a2, a2, -1
+ bgtz a2, 2b
+4: LONG_S t2, a1, 0
- move a0, a2
- jr ra
+ move a0, a2
+ jr ra
-5: li.w a0, -EFAULT
- jr ra
+5: li.w a0, -EFAULT
+ jr ra
- _asm_extable 1b, .L_fixup_handle_unaligned
- _asm_extable 2b, .L_fixup_handle_unaligned
- _asm_extable 4b, .L_fixup_handle_unaligned
+ _asm_extable 1b, .L_fixup_handle_unaligned
+ _asm_extable 2b, .L_fixup_handle_unaligned
+ _asm_extable 4b, .L_fixup_handle_unaligned
SYM_FUNC_END(unaligned_read)
/*
@@ -63,21 +63,21 @@ SYM_FUNC_END(unaligned_read)
* a2: n
*/
SYM_FUNC_START(unaligned_write)
- beqz a2, 3f
+ beqz a2, 3f
- li.w t0, 0
-1: srl.d t1, a1, t0
-2: st.b t1, a0, 0
- addi.d t0, t0, 8
- addi.d a2, a2, -1
- addi.d a0, a0, 1
- bgtz a2, 1b
+ li.w t0, 0
+1: LONG_SRLV t1, a1, t0
+2: st.b t1, a0, 0
+ LONG_ADDI t0, t0, 8
+ PTR_ADDI a2, a2, -1
+ PTR_ADDI a0, a0, 1
+ bgtz a2, 1b
- move a0, a2
- jr ra
+ move a0, a2
+ jr ra
-3: li.w a0, -EFAULT
- jr ra
+3: li.w a0, -EFAULT
+ jr ra
- _asm_extable 2b, .L_fixup_handle_unaligned
+ _asm_extable 2b, .L_fixup_handle_unaligned
SYM_FUNC_END(unaligned_write)
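
Note: to make the register dance above easier to follow, here is a hedged C rendering of both routines (made-up *_sketch names, fragments only, no exception-table handling, which is what the _asm_extable lines provide; n is at most the word size). unaligned_read assembles the value starting at the highest-addressed byte, following the (n - 1) * 8 shift arithmetic of the original code, so that when 'sign' is set the single ld.b sign-extends the most significant byte; unaligned_write stores the low n bytes of the value little-endian:

static long unaligned_read_sketch(const void *addr, unsigned long *value,
				  unsigned long n, int sign)
{
	const unsigned char *p;
	unsigned long v;
	unsigned int shift;

	if (n == 0)
		return -14;			/* 5: li.w a0, -EFAULT */

	p = (const unsigned char *)addr + (n - 1);	/* point at the last byte */
	shift = (unsigned int)(n - 1) * 8;

	if (sign)				/* 1: ld.b -- sign-extend the MSB */
		v = (unsigned long)(long)(signed char)*p << shift;
	else					/* 2: ld.bu -- zero-extend the MSB */
		v = (unsigned long)*p << shift;

	while (--n > 0) {			/* lower bytes always use ld.bu */
		p--;
		shift -= 8;
		v |= (unsigned long)*p << shift;
	}

	*value = v;				/* 4: LONG_S t2, a1, 0 */
	return 0;				/* move a0, a2 with a2 == 0 */
}

static long unaligned_write_sketch(void *addr, unsigned long value,
				   unsigned long n)
{
	unsigned char *p = addr;
	unsigned int shift = 0;

	if (n == 0)
		return -14;			/* 3: li.w a0, -EFAULT */

	while (n-- > 0) {
		/* 1: LONG_SRLV t1, a1, t0 / 2: st.b t1, a0, 0 (may fault) */
		*p++ = (unsigned char)(value >> shift);
		shift += 8;
	}
	return 0;
}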