Diffstat (limited to 'arch/arm/cpu/armv8/cache.S')
-rw-r--r--  arch/arm/cpu/armv8/cache.S  19
1 file changed, 7 insertions(+), 12 deletions(-)
diff --git a/arch/arm/cpu/armv8/cache.S b/arch/arm/cpu/armv8/cache.S
index e04907dd8c..d1cee23437 100644
--- a/arch/arm/cpu/armv8/cache.S
+++ b/arch/arm/cpu/armv8/cache.S
@@ -27,13 +27,11 @@ ENTRY(__asm_dcache_level)
msr csselr_el1, x12 /* select cache level */
isb /* sync change of csselr_el1 */
mrs x6, ccsidr_el1 /* read the new ccsidr_el1 */
- and x2, x6, #7 /* x2 <- log2(cache line size)-4 */
+ ubfx x2, x6, #0, #3 /* x2 <- log2(cache line size)-4 */
+ ubfx x3, x6, #3, #10 /* x3 <- number of cache ways - 1 */
+ ubfx x4, x6, #13, #15 /* x4 <- number of cache sets - 1 */
add x2, x2, #4 /* x2 <- log2(cache line size) */
- mov x3, #0x3ff
- and x3, x3, x6, lsr #3 /* x3 <- max number of #ways */
clz w5, w3 /* bit position of #ways */
- mov x4, #0x7fff
- and x4, x4, x6, lsr #13 /* x4 <- max number of #sets */
/* x12 <- cache level << 1 */
/* x2 <- line length offset */
/* x3 <- number of cache ways - 1 */
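For context (not part of the patch): ubfx Xd, Xn, #lsb, #width is AArch64's unsigned bit-field extract, placing bits [lsb+width-1:lsb] of Xn, zero-extended, into Xd, so each new instruction above replaces a shift-and-mask pair with a single operation. A minimal stand-alone sketch, assuming GAS syntax and a hypothetical helper name that is not part of U-Boot, extracting the same CCSIDR_EL1 fields:

	.global	ccsidr_fields		/* hypothetical helper, not in the patch */
/* In:  x0 = CCSIDR_EL1 value for the cache level selected via CSSELR_EL1
 * Out: x0 = log2(line size in bytes), x1 = ways - 1, x2 = sets - 1 */
ccsidr_fields:
	ubfx	x2, x0, #13, #15	/* NumSets: number of cache sets - 1 */
	ubfx	x1, x0, #3, #10		/* Associativity: number of cache ways - 1 */
	ubfx	x0, x0, #0, #3		/* LineSize: log2(line size in bytes) - 4 */
	add	x0, x0, #4		/* x0 <- log2(cache line size) */
	ret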
@@ -72,8 +70,7 @@ ENTRY(__asm_dcache_all)
mov x1, x0
dsb sy
mrs x10, clidr_el1 /* read clidr_el1 */
- lsr x11, x10, #24
- and x11, x11, #0x7 /* x11 <- loc */
+ ubfx x11, x10, #24, #3 /* x11 <- loc */
cbz x11, finished /* if loc is 0, exit */
mov x15, lr
mov x0, #0 /* start flush at cache level 0 */
@@ -83,8 +80,7 @@ ENTRY(__asm_dcache_all)
/* x15 <- return address */
loop_level:
- lsl x12, x0, #1
- add x12, x12, x0 /* x0 <- tripled cache level */
+ add x12, x0, x0, lsl #1 /* x12 <- tripled cache level */
lsr x12, x10, x12
and x12, x12, #7 /* x12 <- cache type */
cmp x12, #2
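The shifted-register form above computes the same 3 * level (level + (level << 1)) in one instruction, and its comment now correctly names x12 as the destination, where the old comment said x0. A stand-alone sketch of the idiom, assuming a hypothetical label:

	.global	triple_x0		/* hypothetical: returns 3 * x0 in x0 */
triple_x0:
	add	x0, x0, x0, lsl #1	/* x0 <- x0 + (x0 << 1) = 3 * x0 */
	ret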
@@ -131,8 +127,7 @@ ENDPROC(__asm_invalidate_dcache_all)
.pushsection .text.__asm_flush_dcache_range, "ax"
ENTRY(__asm_flush_dcache_range)
mrs x3, ctr_el0
- lsr x3, x3, #16
- and x3, x3, #0xf
+ ubfx x3, x3, #16, #4
mov x2, #4
lsl x2, x2, x3 /* cache line size */
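CTR_EL0.DminLine (bits [19:16]) holds log2 of the smallest data cache line in words, so the line size in bytes is 4 << DminLine, which is exactly what the mov/lsl pair computes after the single-instruction extract. A minimal stand-alone sketch, assuming a hypothetical routine name that is not part of the patch:

	.global	dcache_min_line_size	/* hypothetical: returns line size in bytes in x0 */
dcache_min_line_size:
	mrs	x0, ctr_el0
	ubfx	x0, x0, #16, #4		/* x0 <- CTR_EL0.DminLine */
	mov	x1, #4
	lsl	x0, x1, x0		/* x0 <- 4 << DminLine = line size in bytes */
	ret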
@@ -158,7 +153,7 @@ ENDPROC(__asm_flush_dcache_range)
.pushsection .text.__asm_invalidate_dcache_range, "ax"
ENTRY(__asm_invalidate_dcache_range)
mrs x3, ctr_el0
- ubfm x3, x3, #16, #19
+ ubfx x3, x3, #16, #4
mov x2, #4
lsl x2, x2, x3 /* cache line size */
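For reference, ubfx is the assembler alias of ubfm with imms = lsb + width - 1, so the new form encodes to the same instruction as the old one; it only states the field width explicitly. A sketch of the two spellings of the same DminLine extract (illustrative fragment, outside any routine):

	ubfm	x3, x3, #16, #19	/* old spelling: immr=16, imms=19 -> bits [19:16] */
	ubfx	x3, x3, #16, #4		/* new spelling: lsb=16, width=4 -> same bits */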