Diffstat (limited to 'lib')
-rw-r--r--  lib/aarch64/cache_helpers.S                  8
-rw-r--r--  lib/aarch64/misc_helpers.S                  15
-rw-r--r--  lib/cpus/aarch64/aem_generic.S               3
-rw-r--r--  lib/cpus/aarch64/cortex_a53.S                6
-rw-r--r--  lib/cpus/aarch64/cortex_a57.S               10
-rw-r--r--  lib/cpus/aarch64/cortex_a72.S                9
-rw-r--r--  lib/cpus/aarch64/cpu_helpers.S               7
-rw-r--r--  lib/locks/exclusive/spinlock.S               2
-rw-r--r--  lib/semihosting/aarch64/semihosting_call.S   1
9 files changed, 59 insertions(+), 2 deletions(-)
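The patch below terminates every assembly routine under lib/ with an explicit endfunc marker matching its opening func. As a point of reference only, the following is a minimal sketch of the kind of func/endfunc macro pair such markers rely on; the actual macros live in the project's assembler macro header and may differ in detail, so names and contents here are assumptions, not taken from this diff.

	/*
	 * Hypothetical sketch of a func/endfunc macro pair (assumed, not
	 * part of this patch). func opens a function label and marks the
	 * symbol as a function; endfunc closes it and records the symbol
	 * size, so objdump, debuggers and size-checking tools see proper
	 * function boundaries.
	 */
	.macro func _name
	.type	\_name, %function
	.func	\_name
	\_name:
	.endm

	.macro endfunc _name
	.endfunc
	.size	\_name, . - \_name
	.endm

With such a pair in place, each "endfunc <name>" line added below simply closes the corresponding "func <name>" definition.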
diff --git a/lib/aarch64/cache_helpers.S b/lib/aarch64/cache_helpers.S
index dc601021..0dbab1bd 100644
--- a/lib/aarch64/cache_helpers.S
+++ b/lib/aarch64/cache_helpers.S
@@ -56,6 +56,7 @@ flush_loop:
b.lo flush_loop
dsb sy
ret
+endfunc flush_dcache_range
/* ------------------------------------------
@@ -75,6 +76,7 @@ inv_loop:
b.lo inv_loop
dsb sy
ret
+endfunc inv_dcache_range
/* ---------------------------------------------------------------
@@ -154,6 +156,7 @@ level_done:
isb
exit:
ret
+endfunc do_dcsw_op
dcsw_loop_table:
dcsw_loop isw
@@ -163,10 +166,12 @@ dcsw_loop_table:
func dcsw_op_louis
dcsw_op #LOUIS_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
+endfunc dcsw_op_louis
func dcsw_op_all
dcsw_op #LOC_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
+endfunc dcsw_op_all
/* ---------------------------------------------------------------
* Helper macro for data cache operations by set/way for the
@@ -189,6 +194,7 @@ func dcsw_op_all
*/
func dcsw_op_level1
dcsw_op_level #(1 << LEVEL_SHIFT)
+endfunc dcsw_op_level1
/* ---------------------------------------------------------------
* Data cache operations by set/way for level 2 cache
@@ -199,6 +205,7 @@ func dcsw_op_level1
*/
func dcsw_op_level2
dcsw_op_level #(2 << LEVEL_SHIFT)
+endfunc dcsw_op_level2
/* ---------------------------------------------------------------
* Data cache operations by set/way for level 3 cache
@@ -209,3 +216,4 @@ func dcsw_op_level2
*/
func dcsw_op_level3
dcsw_op_level #(3 << LEVEL_SHIFT)
+endfunc dcsw_op_level3
diff --git a/lib/aarch64/misc_helpers.S b/lib/aarch64/misc_helpers.S
index f605bf40..5f80b597 100644
--- a/lib/aarch64/misc_helpers.S
+++ b/lib/aarch64/misc_helpers.S
@@ -53,6 +53,7 @@ func get_afflvl_shift
mov x1, #MPIDR_AFFLVL_SHIFT
lsl x0, x0, x1
ret
+endfunc get_afflvl_shift
func mpidr_mask_lower_afflvls
cmp x1, #3
@@ -62,14 +63,17 @@ func mpidr_mask_lower_afflvls
lsr x0, x0, x2
lsl x0, x0, x2
ret
+endfunc mpidr_mask_lower_afflvls
func eret
eret
+endfunc eret
func smc
smc #0
+endfunc smc
/* -----------------------------------------------------------------------
* void zeromem16(void *mem, unsigned int length);
@@ -97,7 +101,9 @@ z_loop1:
b.eq z_end
strb wzr, [x0], #1
b z_loop1
-z_end: ret
+z_end:
+ ret
+endfunc zeromem16
/* --------------------------------------------------------------------------
@@ -129,7 +135,9 @@ m_loop1:
strb w3, [x0], #1
subs x2, x2, #1
b.ne m_loop1
-m_end: ret
+m_end:
+ ret
+endfunc memcpy16
/* ---------------------------------------------------------------------------
* Disable the MMU at EL3
@@ -148,11 +156,13 @@ do_disable_mmu:
isb // ensure MMU is off
mov x0, #DCCISW // DCache clean and invalidate
b dcsw_op_all
+endfunc disable_mmu_el3
func disable_mmu_icache_el3
mov x1, #(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
b do_disable_mmu
+endfunc disable_mmu_icache_el3
/* ---------------------------------------------------------------------------
* Enable the use of VFP at EL3
@@ -169,4 +179,5 @@ func enable_vfp
msr cptr_el3, x0
isb
ret
+endfunc enable_vfp
#endif
diff --git a/lib/cpus/aarch64/aem_generic.S b/lib/cpus/aarch64/aem_generic.S
index 58a64a6c..ee53058e 100644
--- a/lib/cpus/aarch64/aem_generic.S
+++ b/lib/cpus/aarch64/aem_generic.S
@@ -49,6 +49,7 @@ func aem_generic_core_pwr_dwn
* ---------------------------------------------
*/
b dcsw_op_louis
+endfunc aem_generic_core_pwr_dwn
func aem_generic_cluster_pwr_dwn
@@ -67,6 +68,7 @@ func aem_generic_cluster_pwr_dwn
*/
mov x0, #DCCISW
b dcsw_op_all
+endfunc aem_generic_cluster_pwr_dwn
/* ---------------------------------------------
* This function provides cpu specific
@@ -80,6 +82,7 @@ func aem_generic_cluster_pwr_dwn
func aem_generic_cpu_reg_dump
mov x6, #0 /* no registers to report */
ret
+endfunc aem_generic_cpu_reg_dump
/* cpu_ops for Base AEM FVP */
diff --git a/lib/cpus/aarch64/cortex_a53.S b/lib/cpus/aarch64/cortex_a53.S
index 188f3c1e..e149e6e6 100644
--- a/lib/cpus/aarch64/cortex_a53.S
+++ b/lib/cpus/aarch64/cortex_a53.S
@@ -44,6 +44,7 @@ func cortex_a53_disable_dcache
msr sctlr_el3, x1
isb
ret
+endfunc cortex_a53_disable_dcache
/* ---------------------------------------------
* Disable intra-cluster coherency
@@ -56,6 +57,7 @@ func cortex_a53_disable_smp
isb
dsb sy
ret
+endfunc cortex_a53_disable_smp
func cortex_a53_reset_func
/* ---------------------------------------------
@@ -72,6 +74,7 @@ func cortex_a53_reset_func
isb
skip_smp_setup:
ret
+endfunc cortex_a53_reset_func
func cortex_a53_core_pwr_dwn
mov x18, x30
@@ -95,6 +98,7 @@ func cortex_a53_core_pwr_dwn
*/
mov x30, x18
b cortex_a53_disable_smp
+endfunc cortex_a53_core_pwr_dwn
func cortex_a53_cluster_pwr_dwn
mov x18, x30
@@ -131,6 +135,7 @@ func cortex_a53_cluster_pwr_dwn
*/
mov x30, x18
b cortex_a53_disable_smp
+endfunc cortex_a53_cluster_pwr_dwn
/* ---------------------------------------------
* This function provides cortex_a53 specific
@@ -149,5 +154,6 @@ func cortex_a53_cpu_reg_dump
adr x6, cortex_a53_regs
mrs x8, CPUECTLR_EL1
ret
+endfunc cortex_a53_cpu_reg_dump
declare_cpu_ops cortex_a53, CORTEX_A53_MIDR
diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S
index eb6c736f..05799d61 100644
--- a/lib/cpus/aarch64/cortex_a57.S
+++ b/lib/cpus/aarch64/cortex_a57.S
@@ -45,6 +45,7 @@ func cortex_a57_disable_dcache
msr sctlr_el3, x1
isb
ret
+endfunc cortex_a57_disable_dcache
/* ---------------------------------------------
* Disable all types of L2 prefetches.
@@ -60,6 +61,7 @@ func cortex_a57_disable_l2_prefetch
isb
dsb ish
ret
+endfunc cortex_a57_disable_l2_prefetch
/* ---------------------------------------------
* Disable intra-cluster coherency
@@ -70,6 +72,7 @@ func cortex_a57_disable_smp
bic x0, x0, #CPUECTLR_SMP_BIT
msr CPUECTLR_EL1, x0
ret
+endfunc cortex_a57_disable_smp
/* ---------------------------------------------
* Disable debug interfaces
@@ -81,6 +84,7 @@ func cortex_a57_disable_ext_debug
isb
dsb sy
ret
+endfunc cortex_a57_disable_ext_debug
/* --------------------------------------------------
* Errata Workaround for Cortex A57 Errata #806969.
@@ -113,6 +117,7 @@ apply_806969:
msr CPUACTLR_EL1, x1
skip_806969:
ret
+endfunc errata_a57_806969_wa
/* ---------------------------------------------------
@@ -146,6 +151,7 @@ apply_813420:
msr CPUACTLR_EL1, x1
skip_813420:
ret
+endfunc errata_a57_813420_wa
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
@@ -188,6 +194,7 @@ func cortex_a57_reset_func
skip_smp_setup:
isb
ret x19
+endfunc cortex_a57_reset_func
/* ----------------------------------------------------
* The CPU Ops core power down function for Cortex-A57.
@@ -227,6 +234,7 @@ func cortex_a57_core_pwr_dwn
*/
mov x30, x18
b cortex_a57_disable_ext_debug
+endfunc cortex_a57_core_pwr_dwn
/* -------------------------------------------------------
* The CPU Ops cluster power down function for Cortex-A57.
@@ -280,6 +288,7 @@ func cortex_a57_cluster_pwr_dwn
*/
mov x30, x18
b cortex_a57_disable_ext_debug
+endfunc cortex_a57_cluster_pwr_dwn
/* ---------------------------------------------
* This function provides cortex_a57 specific
@@ -298,6 +307,7 @@ func cortex_a57_cpu_reg_dump
adr x6, cortex_a57_regs
mrs x8, CPUECTLR_EL1
ret
+endfunc cortex_a57_cpu_reg_dump
declare_cpu_ops cortex_a57, CORTEX_A57_MIDR
diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S
index 2d054fcc..eb37f2ca 100644
--- a/lib/cpus/aarch64/cortex_a72.S
+++ b/lib/cpus/aarch64/cortex_a72.S
@@ -44,6 +44,7 @@ func cortex_a72_disable_dcache
msr sctlr_el3, x1
isb
ret
+endfunc cortex_a72_disable_dcache
/* ---------------------------------------------
* Disable all types of L2 prefetches.
@@ -58,6 +59,7 @@ func cortex_a72_disable_l2_prefetch
msr CPUECTLR_EL1, x0
isb
ret
+endfunc cortex_a72_disable_l2_prefetch
/* ---------------------------------------------
* Disable the load-store hardware prefetcher.
@@ -70,6 +72,7 @@ func cortex_a72_disable_hw_prefetcher
isb
dsb ish
ret
+endfunc cortex_a72_disable_hw_prefetcher
/* ---------------------------------------------
* Disable intra-cluster coherency
@@ -80,6 +83,7 @@ func cortex_a72_disable_smp
bic x0, x0, #CPUECTLR_SMP_BIT
msr CPUECTLR_EL1, x0
ret
+endfunc cortex_a72_disable_smp
/* ---------------------------------------------
* Disable debug interfaces
@@ -91,6 +95,7 @@ func cortex_a72_disable_ext_debug
isb
dsb sy
ret
+endfunc cortex_a72_disable_ext_debug
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
@@ -106,6 +111,7 @@ func cortex_a72_reset_func
msr CPUECTLR_EL1, x0
isb
ret
+endfunc cortex_a72_reset_func
/* ----------------------------------------------------
* The CPU Ops core power down function for Cortex-A72.
@@ -151,6 +157,7 @@ func cortex_a72_core_pwr_dwn
*/
mov x30, x18
b cortex_a72_disable_ext_debug
+endfunc cortex_a72_core_pwr_dwn
/* -------------------------------------------------------
* The CPU Ops cluster power down function for Cortex-A72.
@@ -211,6 +218,7 @@ func cortex_a72_cluster_pwr_dwn
*/
mov x30, x18
b cortex_a72_disable_ext_debug
+endfunc cortex_a72_cluster_pwr_dwn
/* ---------------------------------------------
* This function provides cortex_a72 specific
@@ -229,6 +237,7 @@ func cortex_a72_cpu_reg_dump
adr x6, cortex_a72_regs
mrs x8, CPUECTLR_EL1
ret
+endfunc cortex_a72_cpu_reg_dump
declare_cpu_ops cortex_a72, CORTEX_A72_MIDR
diff --git a/lib/cpus/aarch64/cpu_helpers.S b/lib/cpus/aarch64/cpu_helpers.S
index 24c283ab..e8a13929 100644
--- a/lib/cpus/aarch64/cpu_helpers.S
+++ b/lib/cpus/aarch64/cpu_helpers.S
@@ -67,6 +67,7 @@ func reset_handler
br x2
1:
ret
+endfunc reset_handler
#endif /* IMAGE_BL1 || IMAGE_BL31 */
@@ -88,6 +89,7 @@ func prepare_core_pwr_dwn
/* Get the cpu_ops core_pwr_dwn handler */
ldr x1, [x0, #CPU_PWR_DWN_CORE]
br x1
+endfunc prepare_core_pwr_dwn
/*
* The prepare cluster power down function for all platforms. After
@@ -106,6 +108,7 @@ func prepare_cluster_pwr_dwn
/* Get the cpu_ops cluster_pwr_dwn handler */
ldr x1, [x0, #CPU_PWR_DWN_CLUSTER]
br x1
+endfunc prepare_cluster_pwr_dwn
/*
@@ -129,6 +132,7 @@ func init_cpu_ops
mov x30, x10
1:
ret
+endfunc init_cpu_ops
#endif /* IMAGE_BL31 */
#if IMAGE_BL31 && CRASH_REPORTING
@@ -153,6 +157,7 @@ func do_cpu_reg_dump
1:
mov x30, x16
ret
+endfunc do_cpu_reg_dump
#endif
/*
@@ -197,6 +202,7 @@ func get_cpu_ops_ptr
sub x0, x4, #(CPU_OPS_SIZE + CPU_MIDR)
error_exit:
ret
+endfunc get_cpu_ops_ptr
#if DEBUG
/*
@@ -221,5 +227,6 @@ func print_revision_warning
bl asm_print_str
1:
ret x5
+endfunc print_revision_warning
#endif
diff --git a/lib/locks/exclusive/spinlock.S b/lib/locks/exclusive/spinlock.S
index 5eae2b08..772f14e9 100644
--- a/lib/locks/exclusive/spinlock.S
+++ b/lib/locks/exclusive/spinlock.S
@@ -43,8 +43,10 @@ l2: ldaxr w1, [x0]
stxr w1, w2, [x0]
cbnz w1, l2
ret
+endfunc spin_lock
func spin_unlock
stlr wzr, [x0]
ret
+endfunc spin_unlock
diff --git a/lib/semihosting/aarch64/semihosting_call.S b/lib/semihosting/aarch64/semihosting_call.S
index e6a96752..9fa81413 100644
--- a/lib/semihosting/aarch64/semihosting_call.S
+++ b/lib/semihosting/aarch64/semihosting_call.S
@@ -35,3 +35,4 @@
func semihosting_call
hlt #0xf000
ret
+endfunc semihosting_call