/*
 * Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
#define ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_

#ifndef _ASMLANGUAGE

#include <arch/arm64/cpu.h>
#include <stdbool.h>
#include <stdint.h>

/* All the macros need a memory clobber */

#define read_sysreg(reg)					\
({								\
	uint64_t val;						\
	__asm__ volatile ("mrs %0, " STRINGIFY(reg)		\
			  : "=r" (val) :: "memory");		\
	val;							\
})

#define write_sysreg(val, reg)					\
({								\
	__asm__ volatile ("msr " STRINGIFY(reg) ", %0"		\
			  :: "r" (val) : "memory");		\
})

#define zero_sysreg(reg)					\
({								\
	__asm__ volatile ("msr " STRINGIFY(reg) ", xzr"		\
			  ::: "memory");			\
})

/*
 * Generate read_<reg>(), write_<reg>() and zero_<reg>() accessors
 * for the given system register.
 */
#define MAKE_REG_HELPER(reg)					\
	static ALWAYS_INLINE uint64_t read_##reg(void)		\
	{							\
		return read_sysreg(reg);			\
	}							\
	static ALWAYS_INLINE void write_##reg(uint64_t val)	\
	{							\
		write_sysreg(val, reg);				\
	}							\
	static ALWAYS_INLINE void zero_##reg(void)		\
	{							\
		zero_sysreg(reg);				\
	}

/* Generate the accessors for the EL1, EL2 and EL3 variants of a register */
#define MAKE_REG_HELPER_EL123(reg) \
	MAKE_REG_HELPER(reg##_el1) \
	MAKE_REG_HELPER(reg##_el2) \
	MAKE_REG_HELPER(reg##_el3)

MAKE_REG_HELPER(cntfrq_el0)
MAKE_REG_HELPER(cnthctl_el2)
MAKE_REG_HELPER(cnthp_ctl_el2)
MAKE_REG_HELPER(cnthps_ctl_el2)
MAKE_REG_HELPER(cntv_ctl_el0)
MAKE_REG_HELPER(cntv_cval_el0)
MAKE_REG_HELPER(cntvct_el0)
MAKE_REG_HELPER(cntvoff_el2)
MAKE_REG_HELPER(currentel)
MAKE_REG_HELPER(daif)
MAKE_REG_HELPER(hcr_el2)
MAKE_REG_HELPER(id_aa64pfr0_el1)
MAKE_REG_HELPER(id_aa64mmfr0_el1)
MAKE_REG_HELPER(scr_el3)
MAKE_REG_HELPER(tpidrro_el0)
MAKE_REG_HELPER(clidr_el1)
MAKE_REG_HELPER(csselr_el1)
MAKE_REG_HELPER(ccsidr_el1)
MAKE_REG_HELPER(vmpidr_el2)
MAKE_REG_HELPER(mpidr_el1)

MAKE_REG_HELPER_EL123(actlr)
MAKE_REG_HELPER_EL123(cpacr)
MAKE_REG_HELPER_EL123(cptr)
MAKE_REG_HELPER_EL123(elr)
MAKE_REG_HELPER_EL123(esr)
MAKE_REG_HELPER_EL123(far)
MAKE_REG_HELPER_EL123(mair)
MAKE_REG_HELPER_EL123(sctlr)
MAKE_REG_HELPER_EL123(spsr)
MAKE_REG_HELPER_EL123(tcr)
MAKE_REG_HELPER_EL123(ttbr0)
MAKE_REG_HELPER_EL123(vbar)

#if defined(CONFIG_ARM_MPU)

/* Armv8-R aarch64 mpu registers */
#define mpuir_el1	S3_0_c0_c0_4
#define prselr_el1	S3_0_c6_c2_1
#define prbar_el1	S3_0_c6_c8_0
#define prlar_el1	S3_0_c6_c8_1

MAKE_REG_HELPER(mpuir_el1)
MAKE_REG_HELPER(prselr_el1)
MAKE_REG_HELPER(prbar_el1)
MAKE_REG_HELPER(prlar_el1)

#endif

static ALWAYS_INLINE void enable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_DBG_BIT) : "memory");
}

static ALWAYS_INLINE void disable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_DBG_BIT) : "memory");
}

static ALWAYS_INLINE void enable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_ABT_BIT) : "memory");
}

static ALWAYS_INLINE void disable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_ABT_BIT) : "memory");
}
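/*
 * Usage sketch (illustrative only, not part of the upstream header): the
 * accessors generated by MAKE_REG_HELPER() above compose directly. The
 * function below, whose name is hypothetical, programs the EL0 virtual
 * timer to fire 'ticks' counter cycles from now, using only helpers
 * already defined in this file.
 */
static ALWAYS_INLINE void example_program_vtimer(uint64_t ticks)
{
	/* CNTV_CVAL_EL0 holds the absolute compare value */
	write_cntv_cval_el0(read_cntvct_el0() + ticks);

	/* CNTV_CTL_EL0 bit 0 is ENABLE; bit 1 (IMASK) is left clear */
	write_cntv_ctl_el0(1UL);
}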
static ALWAYS_INLINE void enable_irq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_IRQ_BIT) : "memory");
}

static ALWAYS_INLINE void disable_irq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_IRQ_BIT) : "memory");
}

static ALWAYS_INLINE void enable_fiq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_FIQ_BIT) : "memory");
}

static ALWAYS_INLINE void disable_fiq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_FIQ_BIT) : "memory");
}

#define sev()	__asm__ volatile("sev" : : : "memory")
#define wfe()	__asm__ volatile("wfe" : : : "memory")
#define wfi()	__asm__ volatile("wfi" : : : "memory")

#define dsb()	__asm__ volatile ("dsb sy" ::: "memory")
#define dmb()	__asm__ volatile ("dmb sy" ::: "memory")
#define isb()	__asm__ volatile ("isb" ::: "memory")

/* Zephyr needs these as well */
#define __ISB() isb()
#define __DMB() dmb()
#define __DSB() dsb()

static inline bool is_el_implemented(unsigned int el)
{
	unsigned int shift;

	if (el > 3) {
		return false;
	}

	shift = ID_AA64PFR0_EL1_SHIFT * el;

	return (((read_id_aa64pfr0_el1() >> shift) & ID_AA64PFR0_ELX_MASK) != 0U);
}

static inline bool is_el_highest_implemented(void)
{
	uint32_t el_highest;
	uint32_t curr_el;

	/* Bits [15:0] of ID_AA64PFR0_EL1 hold the four 4-bit EL0..EL3 fields */
	el_highest = read_id_aa64pfr0_el1() & 0xFFFF;

	/* Index of the most significant non-zero field is the highest EL */
	el_highest = (31U - __builtin_clz(el_highest)) / 4;

	curr_el = GET_EL(read_currentel());

	return (curr_el >= el_highest);
}

static inline bool is_el2_sec_supported(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
		ID_AA64PFR0_SEL2_MASK) != 0U);
}

static inline bool is_in_secure_state(void)
{
	/* We cannot read SCR_EL3 from EL2 or EL1 */
	return !IS_ENABLED(CONFIG_ARMV8_A_NS);
}

#endif /* !_ASMLANGUAGE */

#endif /* ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_ */
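/*
 * Usage sketch (illustrative only, not part of the upstream header; kept
 * as a comment so nothing is compiled outside the include guards): the
 * EL predicates above can gate early-boot decisions. For example, to
 * touch an _el2 register only when EL2 is implemented and we are
 * actually running at EL2:
 *
 *	if (is_el_implemented(2) && GET_EL(read_currentel()) == 2U) {
 *		zero_cntvoff_el2();
 *	}
 */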