diff options
Diffstat (limited to 'arch/arm64/kernel/entry.S')
-rw-r--r-- | arch/arm64/kernel/entry.S | 229 |
1 file changed, 192 insertions(+), 37 deletions(-)
diff --git a/arch/arm64/kernel/entry.S b/arch/arm64/kernel/entry.S
index 9ff717d93653..739ec3b359b0 100644
--- a/arch/arm64/kernel/entry.S
+++ b/arch/arm64/kernel/entry.S
@@ -29,6 +29,7 @@ | |||
29 | #include <asm/esr.h> | 29 | #include <asm/esr.h> |
30 | #include <asm/irq.h> | 30 | #include <asm/irq.h> |
31 | #include <asm/memory.h> | 31 | #include <asm/memory.h> |
32 | #include <asm/mmu.h> | ||
32 | #include <asm/ptrace.h> | 33 | #include <asm/ptrace.h> |
33 | #include <asm/thread_info.h> | 34 | #include <asm/thread_info.h> |
34 | #include <asm/uaccess.h> | 35 | #include <asm/uaccess.h> |
@@ -70,8 +71,31 @@ | |||
70 | #define BAD_FIQ 2 | 71 | #define BAD_FIQ 2 |
71 | #define BAD_ERROR 3 | 72 | #define BAD_ERROR 3 |
72 | 73 | ||
73 | .macro kernel_entry, el, regsize = 64 | 74 | .macro kernel_ventry, el, label, regsize = 64 |
75 | .align 7 | ||
76 | #ifdef CONFIG_UNMAP_KERNEL_AT_EL0 | ||
77 | alternative_if ARM64_UNMAP_KERNEL_AT_EL0 | ||
78 | .if \el == 0 | ||
79 | .if \regsize == 64 | ||
80 | mrs x30, tpidrro_el0 | ||
81 | msr tpidrro_el0, xzr | ||
82 | .else | ||
83 | mov x30, xzr | ||
84 | .endif | ||
85 | .endif | ||
86 | alternative_else_nop_endif | ||
87 | #endif | ||
88 | |||
74 | sub sp, sp, #S_FRAME_SIZE | 89 | sub sp, sp, #S_FRAME_SIZE |
90 | b el\()\el\()_\label | ||
91 | .endm | ||
92 | |||
93 | .macro tramp_alias, dst, sym | ||
94 | mov_q \dst, TRAMP_VALIAS | ||
95 | add \dst, \dst, #(\sym - .entry.tramp.text) | ||
96 | .endm | ||
97 | |||
98 | .macro kernel_entry, el, regsize = 64 | ||
75 | .if \regsize == 32 | 99 | .if \regsize == 32 |
76 | mov w0, w0 // zero upper 32 bits of x0 | 100 | mov w0, w0 // zero upper 32 bits of x0 |
77 | .endif | 101 | .endif |
@@ -141,7 +165,7 @@ alternative_else_nop_endif | |||
141 | 165 | ||
142 | .if \el != 0 | 166 | .if \el != 0 |
143 | mrs x21, ttbr0_el1 | 167 | mrs x21, ttbr0_el1 |
144 | tst x21, #0xffff << 48 // Check for the reserved ASID | 168 | tst x21, #TTBR_ASID_MASK // Check for the reserved ASID |
145 | orr x23, x23, #PSR_PAN_BIT // Set the emulated PAN in the saved SPSR | 169 | orr x23, x23, #PSR_PAN_BIT // Set the emulated PAN in the saved SPSR |
146 | b.eq 1f // TTBR0 access already disabled | 170 | b.eq 1f // TTBR0 access already disabled |
147 | and x23, x23, #~PSR_PAN_BIT // Clear the emulated PAN in the saved SPSR | 171 | and x23, x23, #~PSR_PAN_BIT // Clear the emulated PAN in the saved SPSR |
@@ -208,7 +232,7 @@ alternative_else_nop_endif | |||
208 | tbnz x22, #22, 1f // Skip re-enabling TTBR0 access if the PSR_PAN_BIT is set | 232 | tbnz x22, #22, 1f // Skip re-enabling TTBR0 access if the PSR_PAN_BIT is set |
209 | .endif | 233 | .endif |
210 | 234 | ||
211 | __uaccess_ttbr0_enable x0 | 235 | __uaccess_ttbr0_enable x0, x1 |
212 | 236 | ||
213 | .if \el == 0 | 237 | .if \el == 0 |
214 | /* | 238 | /* |
@@ -217,7 +241,7 @@ alternative_else_nop_endif | |||
217 | * Cavium erratum 27456 (broadcast TLBI instructions may cause I-cache | 241 | * Cavium erratum 27456 (broadcast TLBI instructions may cause I-cache |
218 | * corruption). | 242 | * corruption). |
219 | */ | 243 | */ |
220 | post_ttbr0_update_workaround | 244 | bl post_ttbr_update_workaround |
221 | .endif | 245 | .endif |
222 | 1: | 246 | 1: |
223 | .if \el != 0 | 247 | .if \el != 0 |
@@ -229,24 +253,20 @@ alternative_else_nop_endif | |||
229 | .if \el == 0 | 253 | .if \el == 0 |
230 | ldr x23, [sp, #S_SP] // load return stack pointer | 254 | ldr x23, [sp, #S_SP] // load return stack pointer |
231 | msr sp_el0, x23 | 255 | msr sp_el0, x23 |
256 | tst x22, #PSR_MODE32_BIT // native task? | ||
257 | b.eq 3f | ||
258 | |||
232 | #ifdef CONFIG_ARM64_ERRATUM_845719 | 259 | #ifdef CONFIG_ARM64_ERRATUM_845719 |
233 | alternative_if_not ARM64_WORKAROUND_845719 | 260 | alternative_if ARM64_WORKAROUND_845719 |
234 | nop | ||
235 | nop | ||
236 | #ifdef CONFIG_PID_IN_CONTEXTIDR | ||
237 | nop | ||
238 | #endif | ||
239 | alternative_else | ||
240 | tbz x22, #4, 1f | ||
241 | #ifdef CONFIG_PID_IN_CONTEXTIDR | 261 | #ifdef CONFIG_PID_IN_CONTEXTIDR |
242 | mrs x29, contextidr_el1 | 262 | mrs x29, contextidr_el1 |
243 | msr contextidr_el1, x29 | 263 | msr contextidr_el1, x29 |
244 | #else | 264 | #else |
245 | msr contextidr_el1, xzr | 265 | msr contextidr_el1, xzr |
246 | #endif | 266 | #endif |
247 | 1: | 267 | alternative_else_nop_endif |
248 | alternative_endif | ||
249 | #endif | 268 | #endif |
269 | 3: | ||
250 | .endif | 270 | .endif |
251 | 271 | ||
252 | msr elr_el1, x21 // set up the return data | 272 | msr elr_el1, x21 // set up the return data |
@@ -268,7 +288,21 @@ alternative_endif | |||
268 | ldp x28, x29, [sp, #16 * 14] | 288 | ldp x28, x29, [sp, #16 * 14] |
269 | ldr lr, [sp, #S_LR] | 289 | ldr lr, [sp, #S_LR] |
270 | add sp, sp, #S_FRAME_SIZE // restore sp | 290 | add sp, sp, #S_FRAME_SIZE // restore sp |
271 | eret // return to kernel | 291 | |
292 | .if \el == 0 | ||
293 | alternative_insn eret, nop, ARM64_UNMAP_KERNEL_AT_EL0 | ||
294 | #ifdef CONFIG_UNMAP_KERNEL_AT_EL0 | ||
295 | bne 4f | ||
296 | msr far_el1, x30 | ||
297 | tramp_alias x30, tramp_exit_native | ||
298 | br x30 | ||
299 | 4: | ||
300 | tramp_alias x30, tramp_exit_compat | ||
301 | br x30 | ||
302 | #endif | ||
303 | .else | ||
304 | eret | ||
305 | .endif | ||
272 | .endm | 306 | .endm |
273 | 307 | ||
274 | .macro irq_stack_entry | 308 | .macro irq_stack_entry |
@@ -346,31 +380,31 @@ tsk .req x28 // current thread_info | |||
346 | 380 | ||
347 | .align 11 | 381 | .align 11 |
348 | ENTRY(vectors) | 382 | ENTRY(vectors) |
349 | ventry el1_sync_invalid // Synchronous EL1t | 383 | kernel_ventry 1, sync_invalid // Synchronous EL1t |
350 | ventry el1_irq_invalid // IRQ EL1t | 384 | kernel_ventry 1, irq_invalid // IRQ EL1t |
351 | ventry el1_fiq_invalid // FIQ EL1t | 385 | kernel_ventry 1, fiq_invalid // FIQ EL1t |
352 | ventry el1_error_invalid // Error EL1t | 386 | kernel_ventry 1, error_invalid // Error EL1t |
353 | 387 | ||
354 | ventry el1_sync // Synchronous EL1h | 388 | kernel_ventry 1, sync // Synchronous EL1h |
355 | ventry el1_irq // IRQ EL1h | 389 | kernel_ventry 1, irq // IRQ EL1h |
356 | ventry el1_fiq_invalid // FIQ EL1h | 390 | kernel_ventry 1, fiq_invalid // FIQ EL1h |
357 | ventry el1_error_invalid // Error EL1h | 391 | kernel_ventry 1, error_invalid // Error EL1h |
358 | 392 | ||
359 | ventry el0_sync // Synchronous 64-bit EL0 | 393 | kernel_ventry 0, sync // Synchronous 64-bit EL0 |
360 | ventry el0_irq // IRQ 64-bit EL0 | 394 | kernel_ventry 0, irq // IRQ 64-bit EL0 |
361 | ventry el0_fiq_invalid // FIQ 64-bit EL0 | 395 | kernel_ventry 0, fiq_invalid // FIQ 64-bit EL0 |
362 | ventry el0_error_invalid // Error 64-bit EL0 | 396 | kernel_ventry 0, error_invalid // Error 64-bit EL0 |
363 | 397 | ||
364 | #ifdef CONFIG_COMPAT | 398 | #ifdef CONFIG_COMPAT |
365 | ventry el0_sync_compat // Synchronous 32-bit EL0 | 399 | kernel_ventry 0, sync_compat, 32 // Synchronous 32-bit EL0 |
366 | ventry el0_irq_compat // IRQ 32-bit EL0 | 400 | kernel_ventry 0, irq_compat, 32 // IRQ 32-bit EL0 |
367 | ventry el0_fiq_invalid_compat // FIQ 32-bit EL0 | 401 | kernel_ventry 0, fiq_invalid_compat, 32 // FIQ 32-bit EL0 |
368 | ventry el0_error_invalid_compat // Error 32-bit EL0 | 402 | kernel_ventry 0, error_invalid_compat, 32 // Error 32-bit EL0 |
369 | #else | 403 | #else |
370 | ventry el0_sync_invalid // Synchronous 32-bit EL0 | 404 | kernel_ventry 0, sync_invalid, 32 // Synchronous 32-bit EL0 |
371 | ventry el0_irq_invalid // IRQ 32-bit EL0 | 405 | kernel_ventry 0, irq_invalid, 32 // IRQ 32-bit EL0 |
372 | ventry el0_fiq_invalid // FIQ 32-bit EL0 | 406 | kernel_ventry 0, fiq_invalid, 32 // FIQ 32-bit EL0 |
373 | ventry el0_error_invalid // Error 32-bit EL0 | 407 | kernel_ventry 0, error_invalid, 32 // Error 32-bit EL0 |
374 | #endif | 408 | #endif |
375 | END(vectors) | 409 | END(vectors) |
376 | 410 | ||
@@ -572,7 +606,7 @@ el0_sync: | |||
572 | cmp x24, #ESR_ELx_EC_FP_EXC64 // FP/ASIMD exception | 606 | cmp x24, #ESR_ELx_EC_FP_EXC64 // FP/ASIMD exception |
573 | b.eq el0_fpsimd_exc | 607 | b.eq el0_fpsimd_exc |
574 | cmp x24, #ESR_ELx_EC_SYS64 // configurable trap | 608 | cmp x24, #ESR_ELx_EC_SYS64 // configurable trap |
575 | b.eq el0_undef | 609 | b.eq el0_sys |
576 | cmp x24, #ESR_ELx_EC_SP_ALIGN // stack alignment exception | 610 | cmp x24, #ESR_ELx_EC_SP_ALIGN // stack alignment exception |
577 | b.eq el0_sp_pc | 611 | b.eq el0_sp_pc |
578 | cmp x24, #ESR_ELx_EC_PC_ALIGN // pc alignment exception | 612 | cmp x24, #ESR_ELx_EC_PC_ALIGN // pc alignment exception |
@@ -700,6 +734,16 @@ el0_undef: | |||
700 | mov x0, sp | 734 | mov x0, sp |
701 | bl do_undefinstr | 735 | bl do_undefinstr |
702 | b ret_to_user | 736 | b ret_to_user |
737 | el0_sys: | ||
738 | /* | ||
739 | * System instructions, for trapped cache maintenance instructions | ||
740 | */ | ||
741 | enable_dbg_and_irq | ||
742 | ct_user_exit | ||
743 | mov x0, x25 | ||
744 | mov x1, sp | ||
745 | bl do_sysinstr | ||
746 | b ret_to_user | ||
703 | el0_dbg: | 747 | el0_dbg: |
704 | /* | 748 | /* |
705 | * Debug exception handling | 749 | * Debug exception handling |
@@ -911,7 +955,118 @@ __ni_sys_trace: | |||
911 | bl do_ni_syscall | 955 | bl do_ni_syscall |
912 | b __sys_trace_return | 956 | b __sys_trace_return |
913 | 957 | ||
914 | .popsection // .entry.text | 958 | #ifdef CONFIG_UNMAP_KERNEL_AT_EL0 |
959 | /* | ||
960 | * Exception vectors trampoline. | ||
961 | */ | ||
962 | .pushsection ".entry.tramp.text", "ax" | ||
963 | |||
964 | .macro tramp_map_kernel, tmp | ||
965 | mrs \tmp, ttbr1_el1 | ||
966 | sub \tmp, \tmp, #(SWAPPER_DIR_SIZE + RESERVED_TTBR0_SIZE) | ||
967 | bic \tmp, \tmp, #USER_ASID_FLAG | ||
968 | msr ttbr1_el1, \tmp | ||
969 | #ifdef CONFIG_ARCH_MSM8996 | ||
970 | /* ASID already in \tmp[63:48] */ | ||
971 | movk \tmp, #:abs_g2_nc:(TRAMP_VALIAS >> 12) | ||
972 | movk \tmp, #:abs_g1_nc:(TRAMP_VALIAS >> 12) | ||
973 | /* 2MB boundary containing the vectors, so we nobble the walk cache */ | ||
974 | movk \tmp, #:abs_g0_nc:((TRAMP_VALIAS & ~(SZ_2M - 1)) >> 12) | ||
975 | isb | ||
976 | tlbi vae1, \tmp | ||
977 | dsb nsh | ||
978 | #endif /* CONFIG_ARCH_MSM8996 */ | ||
979 | .endm | ||
980 | |||
981 | .macro tramp_unmap_kernel, tmp | ||
982 | mrs \tmp, ttbr1_el1 | ||
983 | add \tmp, \tmp, #(SWAPPER_DIR_SIZE + RESERVED_TTBR0_SIZE) | ||
984 | orr \tmp, \tmp, #USER_ASID_FLAG | ||
985 | msr ttbr1_el1, \tmp | ||
986 | /* | ||
987 | * We avoid running the post_ttbr_update_workaround here because the | ||
988 | * user and kernel ASIDs don't have conflicting mappings, so any | ||
989 | * "blessing" as described in: | ||
990 | * | ||
991 | * http://lkml.kernel.org/r/56BB848A.6060603@caviumnetworks.com | ||
992 | * | ||
993 | * will not hurt correctness. Whilst this may partially defeat the | ||
994 | * point of using split ASIDs in the first place, it avoids | ||
995 | * the hit of invalidating the entire I-cache on every return to | ||
996 | * userspace. | ||
997 | */ | ||
998 | .endm | ||
999 | |||
1000 | .macro tramp_ventry, regsize = 64 | ||
1001 | .align 7 | ||
1002 | 1: | ||
1003 | .if \regsize == 64 | ||
1004 | msr tpidrro_el0, x30 // Restored in kernel_ventry | ||
1005 | .endif | ||
1006 | bl 2f | ||
1007 | b . | ||
1008 | 2: | ||
1009 | tramp_map_kernel x30 | ||
1010 | #ifdef CONFIG_RANDOMIZE_BASE | ||
1011 | adr x30, tramp_vectors + PAGE_SIZE | ||
1012 | #ifndef CONFIG_ARCH_MSM8996 | ||
1013 | isb | ||
1014 | #endif | ||
1015 | ldr x30, [x30] | ||
1016 | #else | ||
1017 | ldr x30, =vectors | ||
1018 | #endif | ||
1019 | prfm plil1strm, [x30, #(1b - tramp_vectors)] | ||
1020 | msr vbar_el1, x30 | ||
1021 | add x30, x30, #(1b - tramp_vectors) | ||
1022 | isb | ||
1023 | ret | ||
1024 | .endm | ||
1025 | |||
1026 | .macro tramp_exit, regsize = 64 | ||
1027 | adr x30, tramp_vectors | ||
1028 | msr vbar_el1, x30 | ||
1029 | tramp_unmap_kernel x30 | ||
1030 | .if \regsize == 64 | ||
1031 | mrs x30, far_el1 | ||
1032 | .endif | ||
1033 | eret | ||
1034 | .endm | ||
1035 | |||
1036 | .align 11 | ||
1037 | ENTRY(tramp_vectors) | ||
1038 | .space 0x400 | ||
1039 | |||
1040 | tramp_ventry | ||
1041 | tramp_ventry | ||
1042 | tramp_ventry | ||
1043 | tramp_ventry | ||
1044 | |||
1045 | tramp_ventry 32 | ||
1046 | tramp_ventry 32 | ||
1047 | tramp_ventry 32 | ||
1048 | tramp_ventry 32 | ||
1049 | END(tramp_vectors) | ||
1050 | |||
1051 | ENTRY(tramp_exit_native) | ||
1052 | tramp_exit | ||
1053 | END(tramp_exit_native) | ||
1054 | |||
1055 | ENTRY(tramp_exit_compat) | ||
1056 | tramp_exit 32 | ||
1057 | END(tramp_exit_compat) | ||
1058 | |||
1059 | .ltorg | ||
1060 | .popsection // .entry.tramp.text | ||
1061 | #ifdef CONFIG_RANDOMIZE_BASE | ||
1062 | .pushsection ".rodata", "a" | ||
1063 | .align PAGE_SHIFT | ||
1064 | .globl __entry_tramp_data_start | ||
1065 | __entry_tramp_data_start: | ||
1066 | .quad vectors | ||
1067 | .popsection // .rodata | ||
1068 | #endif /* CONFIG_RANDOMIZE_BASE */ | ||
1069 | #endif /* CONFIG_UNMAP_KERNEL_AT_EL0 */ | ||
915 | 1070 | ||
916 | /* | 1071 | /* |
917 | * Special system call wrappers. | 1072 | * Special system call wrappers. |