Merge branch 'for-next/sysregs' into for-next/core
* for-next/sysregs: (28 commits)
  arm64/sysreg: Convert ID_AA64ZFR0_EL1 to automatic generation
  arm64/sysreg: Convert ID_AA64SMFR0_EL1 to automatic generation
  arm64/sysreg: Convert LORID_EL1 to automatic generation
  arm64/sysreg: Convert LORC_EL1 to automatic generation
  arm64/sysreg: Convert LORN_EL1 to automatic generation
  arm64/sysreg: Convert LOREA_EL1 to automatic generation
  arm64/sysreg: Convert LORSA_EL1 to automatic generation
  arm64/sysreg: Convert ID_AA64ISAR2_EL1 to automatic generation
  arm64/sysreg: Convert ID_AA64ISAR1_EL1 to automatic generation
  arm64/sysreg: Convert GMID to automatic generation
  arm64/sysreg: Convert DCZID_EL0 to automatic generation
  arm64/sysreg: Convert CTR_EL0 to automatic generation
  arm64/sysreg: Add _EL1 into ID_AA64ISAR2_EL1 definition names
  arm64/sysreg: Add _EL1 into ID_AA64ISAR1_EL1 definition names
  arm64/sysreg: Remove defines for RPRES enumeration
  arm64/sysreg: Standardise naming for ID_AA64ZFR0_EL1 fields
  arm64/sysreg: Standardise naming for ID_AA64SMFR0_EL1 enums
  arm64/sysreg: Standardise naming for WFxT defines
  arm64/sysreg: Make BHB clear feature defines match the architecture
  arm64/sysreg: Align pointer auth enumeration defines with architecture
  ...
commit 618ff55eec
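The unifying change in this series is that hand-maintained ID register field macros (such as ID_AA64ISAR1_APA_SHIFT and ID_AA64ISAR1_APA_ARCHITECTED) are replaced with names generated from the register descriptions in arch/arm64/tools/sysreg, which always carry the full register name and the architecture's own field and enum spellings (ID_AA64ISAR1_EL1_APA_SHIFT, ID_AA64ISAR1_EL1_APA_PAuth). A minimal sketch of a feature check written against the generated names follows; the mask and enum values are reproduced only to keep the example self-contained (they match the ID_AA64ISAR1_EL1 description later in this diff, APA in bits [7:4]), and the helper itself is illustrative rather than code from this merge:

	#include <linux/bitfield.h>
	#include <linux/bits.h>
	#include <linux/types.h>

	/* Assumed generated forms: ID_AA64ISAR1_EL1.APA is bits [7:4], 0b0001 is "PAuth". */
	#define ID_AA64ISAR1_EL1_APA_MASK	GENMASK(7, 4)
	#define ID_AA64ISAR1_EL1_APA_PAuth	0x1

	/* Hypothetical check: is address authentication with the architected
	 * algorithm advertised by this ID_AA64ISAR1_EL1 value? */
	static inline bool isar1_has_address_auth(u64 isar1)
	{
		return FIELD_GET(ID_AA64ISAR1_EL1_APA_MASK, isar1) >=
		       ID_AA64ISAR1_EL1_APA_PAuth;
	}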
@@ -59,9 +59,9 @@ alternative_else_nop_endif

.macro __ptrauth_keys_init_cpu tsk, tmp1, tmp2, tmp3
mrs \tmp1, id_aa64isar1_el1
ubfx \tmp1, \tmp1, #ID_AA64ISAR1_APA_SHIFT, #8
ubfx \tmp1, \tmp1, #ID_AA64ISAR1_EL1_APA_SHIFT, #8
mrs_s \tmp2, SYS_ID_AA64ISAR2_EL1
ubfx \tmp2, \tmp2, #ID_AA64ISAR2_APA3_SHIFT, #4
ubfx \tmp2, \tmp2, #ID_AA64ISAR2_EL1_APA3_SHIFT, #4
orr \tmp1, \tmp1, \tmp2
cbz \tmp1, .Lno_addr_auth\@
mov_q \tmp1, (SCTLR_ELx_ENIA | SCTLR_ELx_ENIB | \
@@ -5,34 +5,9 @@
#ifndef __ASM_CACHE_H
#define __ASM_CACHE_H

#include <asm/cputype.h>
#include <asm/mte-def.h>

#define CTR_L1IP_SHIFT 14
#define CTR_L1IP_MASK 3
#define CTR_DMINLINE_SHIFT 16
#define CTR_IMINLINE_SHIFT 0
#define CTR_IMINLINE_MASK 0xf
#define CTR_ERG_SHIFT 20
#define CTR_CWG_SHIFT 24
#define CTR_CWG_MASK 15
#define CTR_IDC_SHIFT 28
#define CTR_DIC_SHIFT 29

#define CTR_CACHE_MINLINE_MASK \
(0xf << CTR_DMINLINE_SHIFT | CTR_IMINLINE_MASK << CTR_IMINLINE_SHIFT)

#define CTR_L1IP(ctr) (((ctr) >> CTR_L1IP_SHIFT) & CTR_L1IP_MASK)

#define ICACHE_POLICY_VPIPT 0
#define ICACHE_POLICY_RESERVED 1
#define ICACHE_POLICY_VIPT 2
#define ICACHE_POLICY_PIPT 3

#define L1_CACHE_SHIFT (6)
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

#define CLIDR_LOUU_SHIFT 27
#define CLIDR_LOC_SHIFT 24
#define CLIDR_LOUIS_SHIFT 21
@@ -55,6 +30,10 @@
#include <linux/bitops.h>
#include <linux/kasan-enabled.h>

#include <asm/cputype.h>
#include <asm/mte-def.h>
#include <asm/sysreg.h>

#ifdef CONFIG_KASAN_SW_TAGS
#define ARCH_SLAB_MINALIGN (1ULL << KASAN_SHADOW_SCALE_SHIFT)
#elif defined(CONFIG_KASAN_HW_TAGS)
@@ -66,6 +45,12 @@ static inline unsigned int arch_slab_minalign(void)
#define arch_slab_minalign() arch_slab_minalign()
#endif

#define CTR_CACHE_MINLINE_MASK \
(0xf << CTR_EL0_DMINLINE_SHIFT | \
CTR_EL0_IMINLINE_MASK << CTR_EL0_IMINLINE_SHIFT)

#define CTR_L1IP(ctr) SYS_FIELD_GET(CTR_EL0, L1Ip, ctr)

#define ICACHEF_ALIASING 0
#define ICACHEF_VPIPT 1
extern unsigned long __icache_flags;
@@ -86,7 +71,7 @@ static __always_inline int icache_is_vpipt(void)

static inline u32 cache_type_cwg(void)
{
return (read_cpuid_cachetype() >> CTR_CWG_SHIFT) & CTR_CWG_MASK;
return (read_cpuid_cachetype() >> CTR_EL0_CWG_SHIFT) & CTR_EL0_CWG_MASK;
}

#define __read_mostly __section(".data..read_mostly")
@@ -120,12 +105,12 @@ static inline u32 __attribute_const__ read_cpuid_effective_cachetype(void)
{
u32 ctr = read_cpuid_cachetype();

if (!(ctr & BIT(CTR_IDC_SHIFT))) {
if (!(ctr & BIT(CTR_EL0_IDC_SHIFT))) {
u64 clidr = read_sysreg(clidr_el1);

if (CLIDR_LOC(clidr) == 0 ||
(CLIDR_LOUIS(clidr) == 0 && CLIDR_LOUU(clidr) == 0))
ctr |= BIT(CTR_IDC_SHIFT);
ctr |= BIT(CTR_EL0_IDC_SHIFT);
}

return ctr;
@@ -673,7 +673,7 @@ static inline bool supports_clearbhb(int scope)
isar2 = read_sanitised_ftr_reg(SYS_ID_AA64ISAR2_EL1);

return cpuid_feature_extract_unsigned_field(isar2,
ID_AA64ISAR2_CLEARBHB_SHIFT);
ID_AA64ISAR2_EL1_BC_SHIFT);
}

const struct cpumask *system_32bit_el0_cpumask(void);
@ -161,7 +161,7 @@
|
||||
mov x1, #0 // SMCR controls
|
||||
|
||||
mrs_s x2, SYS_ID_AA64SMFR0_EL1
|
||||
ubfx x2, x2, #ID_AA64SMFR0_FA64_SHIFT, #1 // Full FP in SM?
|
||||
ubfx x2, x2, #ID_AA64SMFR0_EL1_FA64_SHIFT, #1 // Full FP in SM?
|
||||
cbz x2, .Lskip_sme_fa64_\@
|
||||
|
||||
orr x1, x1, SMCR_ELx_FA64_MASK
|
||||
|
@ -192,8 +192,6 @@
|
||||
|
||||
#define SYS_ID_AA64PFR0_EL1 sys_reg(3, 0, 0, 4, 0)
|
||||
#define SYS_ID_AA64PFR1_EL1 sys_reg(3, 0, 0, 4, 1)
|
||||
#define SYS_ID_AA64ZFR0_EL1 sys_reg(3, 0, 0, 4, 4)
|
||||
#define SYS_ID_AA64SMFR0_EL1 sys_reg(3, 0, 0, 4, 5)
|
||||
|
||||
#define SYS_ID_AA64DFR0_EL1 sys_reg(3, 0, 0, 5, 0)
|
||||
#define SYS_ID_AA64DFR1_EL1 sys_reg(3, 0, 0, 5, 1)
|
||||
@ -201,9 +199,6 @@
|
||||
#define SYS_ID_AA64AFR0_EL1 sys_reg(3, 0, 0, 5, 4)
|
||||
#define SYS_ID_AA64AFR1_EL1 sys_reg(3, 0, 0, 5, 5)
|
||||
|
||||
#define SYS_ID_AA64ISAR1_EL1 sys_reg(3, 0, 0, 6, 1)
|
||||
#define SYS_ID_AA64ISAR2_EL1 sys_reg(3, 0, 0, 6, 2)
|
||||
|
||||
#define SYS_ID_AA64MMFR0_EL1 sys_reg(3, 0, 0, 7, 0)
|
||||
#define SYS_ID_AA64MMFR1_EL1 sys_reg(3, 0, 0, 7, 1)
|
||||
#define SYS_ID_AA64MMFR2_EL1 sys_reg(3, 0, 0, 7, 2)
|
||||
@ -410,12 +405,6 @@
|
||||
#define SYS_MAIR_EL1 sys_reg(3, 0, 10, 2, 0)
|
||||
#define SYS_AMAIR_EL1 sys_reg(3, 0, 10, 3, 0)
|
||||
|
||||
#define SYS_LORSA_EL1 sys_reg(3, 0, 10, 4, 0)
|
||||
#define SYS_LOREA_EL1 sys_reg(3, 0, 10, 4, 1)
|
||||
#define SYS_LORN_EL1 sys_reg(3, 0, 10, 4, 2)
|
||||
#define SYS_LORC_EL1 sys_reg(3, 0, 10, 4, 3)
|
||||
#define SYS_LORID_EL1 sys_reg(3, 0, 10, 4, 7)
|
||||
|
||||
#define SYS_VBAR_EL1 sys_reg(3, 0, 12, 0, 0)
|
||||
#define SYS_DISR_EL1 sys_reg(3, 0, 12, 1, 1)
|
||||
|
||||
@ -454,16 +443,12 @@
|
||||
#define SYS_CNTKCTL_EL1 sys_reg(3, 0, 14, 1, 0)
|
||||
|
||||
#define SYS_CCSIDR_EL1 sys_reg(3, 1, 0, 0, 0)
|
||||
#define SYS_GMID_EL1 sys_reg(3, 1, 0, 0, 4)
|
||||
#define SYS_AIDR_EL1 sys_reg(3, 1, 0, 0, 7)
|
||||
|
||||
#define SMIDR_EL1_IMPLEMENTER_SHIFT 24
|
||||
#define SMIDR_EL1_SMPS_SHIFT 15
|
||||
#define SMIDR_EL1_AFFINITY_SHIFT 0
|
||||
|
||||
#define SYS_CTR_EL0 sys_reg(3, 3, 0, 0, 1)
|
||||
#define SYS_DCZID_EL0 sys_reg(3, 3, 0, 0, 7)
|
||||
|
||||
#define SYS_RNDR_EL0 sys_reg(3, 3, 2, 4, 0)
|
||||
#define SYS_RNDRRS_EL0 sys_reg(3, 3, 2, 4, 1)
|
||||
|
||||
@ -704,66 +689,6 @@
|
||||
/* Position the attr at the correct index */
|
||||
#define MAIR_ATTRIDX(attr, idx) ((attr) << ((idx) * 8))
|
||||
|
||||
/* id_aa64isar1 */
|
||||
#define ID_AA64ISAR1_I8MM_SHIFT 52
|
||||
#define ID_AA64ISAR1_DGH_SHIFT 48
|
||||
#define ID_AA64ISAR1_BF16_SHIFT 44
|
||||
#define ID_AA64ISAR1_SPECRES_SHIFT 40
|
||||
#define ID_AA64ISAR1_SB_SHIFT 36
|
||||
#define ID_AA64ISAR1_FRINTTS_SHIFT 32
|
||||
#define ID_AA64ISAR1_GPI_SHIFT 28
|
||||
#define ID_AA64ISAR1_GPA_SHIFT 24
|
||||
#define ID_AA64ISAR1_LRCPC_SHIFT 20
|
||||
#define ID_AA64ISAR1_FCMA_SHIFT 16
|
||||
#define ID_AA64ISAR1_JSCVT_SHIFT 12
|
||||
#define ID_AA64ISAR1_API_SHIFT 8
|
||||
#define ID_AA64ISAR1_APA_SHIFT 4
|
||||
#define ID_AA64ISAR1_DPB_SHIFT 0
|
||||
|
||||
#define ID_AA64ISAR1_APA_NI 0x0
|
||||
#define ID_AA64ISAR1_APA_ARCHITECTED 0x1
|
||||
#define ID_AA64ISAR1_APA_ARCH_EPAC 0x2
|
||||
#define ID_AA64ISAR1_APA_ARCH_EPAC2 0x3
|
||||
#define ID_AA64ISAR1_APA_ARCH_EPAC2_FPAC 0x4
|
||||
#define ID_AA64ISAR1_APA_ARCH_EPAC2_FPAC_CMB 0x5
|
||||
#define ID_AA64ISAR1_API_NI 0x0
|
||||
#define ID_AA64ISAR1_API_IMP_DEF 0x1
|
||||
#define ID_AA64ISAR1_API_IMP_DEF_EPAC 0x2
|
||||
#define ID_AA64ISAR1_API_IMP_DEF_EPAC2 0x3
|
||||
#define ID_AA64ISAR1_API_IMP_DEF_EPAC2_FPAC 0x4
|
||||
#define ID_AA64ISAR1_API_IMP_DEF_EPAC2_FPAC_CMB 0x5
|
||||
#define ID_AA64ISAR1_GPA_NI 0x0
|
||||
#define ID_AA64ISAR1_GPA_ARCHITECTED 0x1
|
||||
#define ID_AA64ISAR1_GPI_NI 0x0
|
||||
#define ID_AA64ISAR1_GPI_IMP_DEF 0x1
|
||||
|
||||
/* id_aa64isar2 */
|
||||
#define ID_AA64ISAR2_CLEARBHB_SHIFT 28
|
||||
#define ID_AA64ISAR2_APA3_SHIFT 12
|
||||
#define ID_AA64ISAR2_GPA3_SHIFT 8
|
||||
#define ID_AA64ISAR2_RPRES_SHIFT 4
|
||||
#define ID_AA64ISAR2_WFXT_SHIFT 0
|
||||
|
||||
#define ID_AA64ISAR2_RPRES_8BIT 0x0
|
||||
#define ID_AA64ISAR2_RPRES_12BIT 0x1
|
||||
/*
|
||||
* Value 0x1 has been removed from the architecture, and is
|
||||
* reserved, but has not yet been removed from the ARM ARM
|
||||
* as of ARM DDI 0487G.b.
|
||||
*/
|
||||
#define ID_AA64ISAR2_WFXT_NI 0x0
|
||||
#define ID_AA64ISAR2_WFXT_SUPPORTED 0x2
|
||||
|
||||
#define ID_AA64ISAR2_APA3_NI 0x0
|
||||
#define ID_AA64ISAR2_APA3_ARCHITECTED 0x1
|
||||
#define ID_AA64ISAR2_APA3_ARCH_EPAC 0x2
|
||||
#define ID_AA64ISAR2_APA3_ARCH_EPAC2 0x3
|
||||
#define ID_AA64ISAR2_APA3_ARCH_EPAC2_FPAC 0x4
|
||||
#define ID_AA64ISAR2_APA3_ARCH_EPAC2_FPAC_CMB 0x5
|
||||
|
||||
#define ID_AA64ISAR2_GPA3_NI 0x0
|
||||
#define ID_AA64ISAR2_GPA3_ARCHITECTED 0x1
|
||||
|
||||
/* id_aa64pfr0 */
|
||||
#define ID_AA64PFR0_CSV3_SHIFT 60
|
||||
#define ID_AA64PFR0_CSV2_SHIFT 56
|
||||
@ -811,45 +736,6 @@
|
||||
#define ID_AA64PFR1_MTE 0x2
|
||||
#define ID_AA64PFR1_MTE_ASYMM 0x3
|
||||
|
||||
/* id_aa64zfr0 */
|
||||
#define ID_AA64ZFR0_F64MM_SHIFT 56
|
||||
#define ID_AA64ZFR0_F32MM_SHIFT 52
|
||||
#define ID_AA64ZFR0_I8MM_SHIFT 44
|
||||
#define ID_AA64ZFR0_SM4_SHIFT 40
|
||||
#define ID_AA64ZFR0_SHA3_SHIFT 32
|
||||
#define ID_AA64ZFR0_BF16_SHIFT 20
|
||||
#define ID_AA64ZFR0_BITPERM_SHIFT 16
|
||||
#define ID_AA64ZFR0_AES_SHIFT 4
|
||||
#define ID_AA64ZFR0_SVEVER_SHIFT 0
|
||||
|
||||
#define ID_AA64ZFR0_F64MM 0x1
|
||||
#define ID_AA64ZFR0_F32MM 0x1
|
||||
#define ID_AA64ZFR0_I8MM 0x1
|
||||
#define ID_AA64ZFR0_BF16 0x1
|
||||
#define ID_AA64ZFR0_SM4 0x1
|
||||
#define ID_AA64ZFR0_SHA3 0x1
|
||||
#define ID_AA64ZFR0_BITPERM 0x1
|
||||
#define ID_AA64ZFR0_AES 0x1
|
||||
#define ID_AA64ZFR0_AES_PMULL 0x2
|
||||
#define ID_AA64ZFR0_SVEVER_SVE2 0x1
|
||||
|
||||
/* id_aa64smfr0 */
|
||||
#define ID_AA64SMFR0_FA64_SHIFT 63
|
||||
#define ID_AA64SMFR0_I16I64_SHIFT 52
|
||||
#define ID_AA64SMFR0_F64F64_SHIFT 48
|
||||
#define ID_AA64SMFR0_I8I32_SHIFT 36
|
||||
#define ID_AA64SMFR0_F16F32_SHIFT 35
|
||||
#define ID_AA64SMFR0_B16F32_SHIFT 34
|
||||
#define ID_AA64SMFR0_F32F32_SHIFT 32
|
||||
|
||||
#define ID_AA64SMFR0_FA64 0x1
|
||||
#define ID_AA64SMFR0_I16I64 0xf
|
||||
#define ID_AA64SMFR0_F64F64 0x1
|
||||
#define ID_AA64SMFR0_I8I32 0xf
|
||||
#define ID_AA64SMFR0_F16F32 0x1
|
||||
#define ID_AA64SMFR0_B16F32 0x1
|
||||
#define ID_AA64SMFR0_F32F32 0x1
|
||||
|
||||
/* id_aa64mmfr0 */
|
||||
#define ID_AA64MMFR0_ECV_SHIFT 60
|
||||
#define ID_AA64MMFR0_FGT_SHIFT 56
|
||||
@ -1084,9 +970,6 @@
|
||||
#define MVFR2_FPMISC_SHIFT 4
|
||||
#define MVFR2_SIMDMISC_SHIFT 0
|
||||
|
||||
#define DCZID_DZP_SHIFT 4
|
||||
#define DCZID_BS_SHIFT 0
|
||||
|
||||
#define CPACR_EL1_FPEN_EL1EN (BIT(20)) /* enable EL1 access */
|
||||
#define CPACR_EL1_FPEN_EL0EN (BIT(21)) /* enable EL0 access, if EL1EN set */
|
||||
|
||||
@ -1121,8 +1004,8 @@
|
||||
#define SYS_RGSR_EL1_SEED_MASK 0xffffUL
|
||||
|
||||
/* GMID_EL1 field definitions */
|
||||
#define SYS_GMID_EL1_BS_SHIFT 0
|
||||
#define SYS_GMID_EL1_BS_SIZE 4
|
||||
#define GMID_EL1_BS_SHIFT 0
|
||||
#define GMID_EL1_BS_SIZE 4
|
||||
|
||||
/* TFSR{,E0}_EL1 bit definitions */
|
||||
#define SYS_TFSR_EL1_TF0_SHIFT 0
|
||||
@@ -1324,6 +1207,9 @@

#endif

#define SYS_FIELD_GET(reg, field, val) \
FIELD_GET(reg##_##field##_MASK, val)

#define SYS_FIELD_PREP(reg, field, val) \
FIELD_PREP(reg##_##field##_MASK, val)
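SYS_FIELD_GET() and SYS_FIELD_PREP() are thin wrappers around FIELD_GET()/FIELD_PREP() that paste together the generated <register>_<field>_MASK names, which is what lets CTR_L1IP(ctr) above become SYS_FIELD_GET(CTR_EL0, L1Ip, ctr). A minimal sketch of that expansion; the CTR_EL0.L1Ip mask and PIPT value are assumed here for self-containment (they follow the CTR_EL0 description later in this diff, bits [15:14]), and the helper is illustrative only:

	#include <linux/bitfield.h>
	#include <linux/bits.h>
	#include <linux/types.h>

	/* Assumed generated forms for CTR_EL0.L1Ip (bits [15:14]). */
	#define CTR_EL0_L1Ip_MASK	GENMASK(15, 14)
	#define CTR_EL0_L1Ip_PIPT	3	/* 0b11 */

	#define SYS_FIELD_GET(reg, field, val) \
		FIELD_GET(reg##_##field##_MASK, val)

	/* Hypothetical helper: SYS_FIELD_GET(CTR_EL0, L1Ip, ctr) expands to
	 * FIELD_GET(CTR_EL0_L1Ip_MASK, ctr), i.e. (ctr >> 14) & 0x3. */
	static inline bool l1_icache_is_pipt(u64 ctr)
	{
		return SYS_FIELD_GET(CTR_EL0, L1Ip, ctr) == CTR_EL0_L1Ip_PIPT;
	}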
@@ -121,7 +121,7 @@ static void clean_dcache_range_nopatch(u64 start, u64 end)

ctr_el0 = read_sanitised_ftr_reg(SYS_CTR_EL0);
d_size = 4 << cpuid_feature_extract_unsigned_field(ctr_el0,
CTR_DMINLINE_SHIFT);
CTR_EL0_DminLine_SHIFT);
cur = start & ~(d_size - 1);
do {
/*
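The rename above is mechanical: CTR_EL0.DminLine (bits [19:16], see the CTR_EL0 description added later in this diff) encodes log2 of the smallest D-cache line size in words, so the line size in bytes is 4 << DminLine, which is exactly what d_size computes. A standalone sketch of that computation, assuming a raw CTR_EL0 value obtained elsewhere; the mask and helper name are illustrative, not part of this merge:

	#include <linux/bitfield.h>
	#include <linux/bits.h>
	#include <linux/types.h>

	/* Assumed generated form for CTR_EL0.DminLine (bits [19:16]). */
	#define CTR_EL0_DminLine_MASK	GENMASK(19, 16)

	/* Hypothetical helper: smallest data cache line size, in bytes,
	 * advertised by a raw CTR_EL0 value (4 bytes per word << log2(words)). */
	static inline unsigned int ctr_dcache_line_bytes(u64 ctr)
	{
		return 4U << FIELD_GET(CTR_EL0_DminLine_MASK, ctr);
	}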
@ -187,7 +187,7 @@ has_neoverse_n1_erratum_1542419(const struct arm64_cpu_capabilities *entry,
|
||||
int scope)
|
||||
{
|
||||
u32 midr = read_cpuid_id();
|
||||
bool has_dic = read_cpuid_cachetype() & BIT(CTR_DIC_SHIFT);
|
||||
bool has_dic = read_cpuid_cachetype() & BIT(CTR_EL0_DIC_SHIFT);
|
||||
const struct midr_range range = MIDR_ALL_VERSIONS(MIDR_NEOVERSE_N1);
|
||||
|
||||
WARN_ON(scope != SCOPE_LOCAL_CPU || preemptible());
|
||||
|
@ -210,35 +210,35 @@ static const struct arm64_ftr_bits ftr_id_aa64isar0[] = {
|
||||
};
|
||||
|
||||
static const struct arm64_ftr_bits ftr_id_aa64isar1[] = {
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_I8MM_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_DGH_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_BF16_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_SPECRES_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_SB_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_FRINTTS_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_I8MM_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_DGH_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_BF16_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_SPECRES_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_SB_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_FRINTTS_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_PTR_AUTH),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_GPI_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_GPI_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_PTR_AUTH),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_GPA_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_LRCPC_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_FCMA_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_JSCVT_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_GPA_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_LRCPC_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_FCMA_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_JSCVT_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_PTR_AUTH),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64ISAR1_API_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64ISAR1_EL1_API_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_PTR_AUTH),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64ISAR1_APA_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_DPB_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64ISAR1_EL1_APA_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR1_EL1_DPB_SHIFT, 4, 0),
|
||||
ARM64_FTR_END,
|
||||
};
|
||||
|
||||
static const struct arm64_ftr_bits ftr_id_aa64isar2[] = {
|
||||
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_HIGHER_SAFE, ID_AA64ISAR2_CLEARBHB_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_HIGHER_SAFE, ID_AA64ISAR2_EL1_BC_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_PTR_AUTH),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64ISAR2_APA3_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64ISAR2_EL1_APA3_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_PTR_AUTH),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR2_GPA3_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64ISAR2_RPRES_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64ISAR2_WFXT_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR2_EL1_GPA3_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64ISAR2_EL1_RPRES_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64ISAR2_EL1_WFxT_SHIFT, 4, 0),
|
||||
ARM64_FTR_END,
|
||||
};
|
||||
|
||||
@ -277,41 +277,41 @@ static const struct arm64_ftr_bits ftr_id_aa64pfr1[] = {
|
||||
|
||||
static const struct arm64_ftr_bits ftr_id_aa64zfr0[] = {
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_F64MM_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_F64MM_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_F32MM_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_F32MM_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_I8MM_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_I8MM_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_SM4_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_SM4_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_SHA3_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_SHA3_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_BF16_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_BF16_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_BITPERM_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_BitPerm_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_AES_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_AES_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SVE),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_SVEVER_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ZFR0_EL1_SVEver_SHIFT, 4, 0),
|
||||
ARM64_FTR_END,
|
||||
};
|
||||
|
||||
static const struct arm64_ftr_bits ftr_id_aa64smfr0[] = {
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_FA64_SHIFT, 1, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I16I64_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F64F64_SHIFT, 1, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I8I32_SHIFT, 4, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F16F32_SHIFT, 1, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_B16F32_SHIFT, 1, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F32F32_SHIFT, 1, 0),
|
||||
FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, 0),
|
||||
ARM64_FTR_END,
|
||||
};
|
||||
|
||||
@ -397,18 +397,18 @@ static const struct arm64_ftr_bits ftr_id_aa64mmfr2[] = {
|
||||
|
||||
static const struct arm64_ftr_bits ftr_ctr[] = {
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_EXACT, 31, 1, 1), /* RES1 */
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_DIC_SHIFT, 1, 1),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_IDC_SHIFT, 1, 1),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_HIGHER_OR_ZERO_SAFE, CTR_CWG_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_HIGHER_OR_ZERO_SAFE, CTR_ERG_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_DMINLINE_SHIFT, 4, 1),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_EL0_DIC_SHIFT, 1, 1),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_EL0_IDC_SHIFT, 1, 1),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_HIGHER_OR_ZERO_SAFE, CTR_EL0_CWG_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_HIGHER_OR_ZERO_SAFE, CTR_EL0_ERG_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_EL0_DminLine_SHIFT, 4, 1),
|
||||
/*
|
||||
* Linux can handle differing I-cache policies. Userspace JITs will
|
||||
* make use of *minLine.
|
||||
* If we have differing I-cache policies, report it as the weakest - VIPT.
|
||||
*/
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_NONSTRICT, FTR_EXACT, CTR_L1IP_SHIFT, 2, ICACHE_POLICY_VIPT), /* L1Ip */
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_IMINLINE_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_NONSTRICT, FTR_EXACT, CTR_EL0_L1Ip_SHIFT, 2, CTR_EL0_L1Ip_VIPT), /* L1Ip */
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, CTR_EL0_IminLine_SHIFT, 4, 0),
|
||||
ARM64_FTR_END,
|
||||
};
|
||||
|
||||
@ -454,13 +454,13 @@ static const struct arm64_ftr_bits ftr_mvfr2[] = {
|
||||
};
|
||||
|
||||
static const struct arm64_ftr_bits ftr_dczid[] = {
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_EXACT, DCZID_DZP_SHIFT, 1, 1),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, DCZID_BS_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_EXACT, DCZID_EL0_DZP_SHIFT, 1, 1),
|
||||
ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, DCZID_EL0_BS_SHIFT, 4, 0),
|
||||
ARM64_FTR_END,
|
||||
};
|
||||
|
||||
static const struct arm64_ftr_bits ftr_gmid[] = {
|
||||
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, SYS_GMID_EL1_BS_SHIFT, 4, 0),
|
||||
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, GMID_EL1_BS_SHIFT, 4, 0),
|
||||
ARM64_FTR_END,
|
||||
};
|
||||
|
||||
@ -1481,7 +1481,7 @@ static bool has_cache_idc(const struct arm64_cpu_capabilities *entry,
|
||||
else
|
||||
ctr = read_cpuid_effective_cachetype();
|
||||
|
||||
return ctr & BIT(CTR_IDC_SHIFT);
|
||||
return ctr & BIT(CTR_EL0_IDC_SHIFT);
|
||||
}
|
||||
|
||||
static void cpu_emulate_effective_ctr(const struct arm64_cpu_capabilities *__unused)
|
||||
@ -1492,7 +1492,7 @@ static void cpu_emulate_effective_ctr(const struct arm64_cpu_capabilities *__unu
|
||||
* to the CTR_EL0 on this CPU and emulate it with the real/safe
|
||||
* value.
|
||||
*/
|
||||
if (!(read_cpuid_cachetype() & BIT(CTR_IDC_SHIFT)))
|
||||
if (!(read_cpuid_cachetype() & BIT(CTR_EL0_IDC_SHIFT)))
|
||||
sysreg_clear_set(sctlr_el1, SCTLR_EL1_UCT, 0);
|
||||
}
|
||||
|
||||
@ -1506,7 +1506,7 @@ static bool has_cache_dic(const struct arm64_cpu_capabilities *entry,
|
||||
else
|
||||
ctr = read_cpuid_cachetype();
|
||||
|
||||
return ctr & BIT(CTR_DIC_SHIFT);
|
||||
return ctr & BIT(CTR_EL0_DIC_SHIFT);
|
||||
}
|
||||
|
||||
static bool __maybe_unused
|
||||
@ -2189,7 +2189,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_SYSTEM_FEATURE,
|
||||
.matches = has_cpuid_feature,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.field_pos = ID_AA64ISAR1_DPB_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_DPB_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = 1,
|
||||
},
|
||||
@ -2200,7 +2200,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.matches = has_cpuid_feature,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR1_DPB_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_DPB_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = 2,
|
||||
},
|
||||
@ -2360,7 +2360,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_SYSTEM_FEATURE,
|
||||
.matches = has_cpuid_feature,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.field_pos = ID_AA64ISAR1_SB_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_SB_SHIFT,
|
||||
.field_width = 4,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.min_field_value = 1,
|
||||
@ -2372,9 +2372,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_BOOT_CPU_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR1_APA_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_APA_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = ID_AA64ISAR1_APA_ARCHITECTED,
|
||||
.min_field_value = ID_AA64ISAR1_EL1_APA_PAuth,
|
||||
.matches = has_address_auth_cpucap,
|
||||
},
|
||||
{
|
||||
@ -2383,9 +2383,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_BOOT_CPU_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR2_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR2_APA3_SHIFT,
|
||||
.field_pos = ID_AA64ISAR2_EL1_APA3_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = ID_AA64ISAR2_APA3_ARCHITECTED,
|
||||
.min_field_value = ID_AA64ISAR2_EL1_APA3_PAuth,
|
||||
.matches = has_address_auth_cpucap,
|
||||
},
|
||||
{
|
||||
@ -2394,9 +2394,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_BOOT_CPU_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR1_API_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_API_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = ID_AA64ISAR1_API_IMP_DEF,
|
||||
.min_field_value = ID_AA64ISAR1_EL1_API_PAuth,
|
||||
.matches = has_address_auth_cpucap,
|
||||
},
|
||||
{
|
||||
@ -2410,9 +2410,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_SYSTEM_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR1_GPA_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_GPA_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = ID_AA64ISAR1_GPA_ARCHITECTED,
|
||||
.min_field_value = ID_AA64ISAR1_EL1_GPA_IMP,
|
||||
.matches = has_cpuid_feature,
|
||||
},
|
||||
{
|
||||
@ -2421,9 +2421,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_SYSTEM_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR2_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR2_GPA3_SHIFT,
|
||||
.field_pos = ID_AA64ISAR2_EL1_GPA3_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = ID_AA64ISAR2_GPA3_ARCHITECTED,
|
||||
.min_field_value = ID_AA64ISAR2_EL1_GPA3_IMP,
|
||||
.matches = has_cpuid_feature,
|
||||
},
|
||||
{
|
||||
@ -2432,9 +2432,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_SYSTEM_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR1_GPI_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_GPI_SHIFT,
|
||||
.field_width = 4,
|
||||
.min_field_value = ID_AA64ISAR1_GPI_IMP_DEF,
|
||||
.min_field_value = ID_AA64ISAR1_EL1_GPI_IMP,
|
||||
.matches = has_cpuid_feature,
|
||||
},
|
||||
{
|
||||
@ -2535,7 +2535,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_SYSTEM_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR1_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR1_LRCPC_SHIFT,
|
||||
.field_pos = ID_AA64ISAR1_EL1_LRCPC_SHIFT,
|
||||
.field_width = 4,
|
||||
.matches = has_cpuid_feature,
|
||||
.min_field_value = 1,
|
||||
@ -2560,9 +2560,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.capability = ARM64_SME_FA64,
|
||||
.sys_reg = SYS_ID_AA64SMFR0_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64SMFR0_FA64_SHIFT,
|
||||
.field_pos = ID_AA64SMFR0_EL1_FA64_SHIFT,
|
||||
.field_width = 1,
|
||||
.min_field_value = ID_AA64SMFR0_FA64,
|
||||
.min_field_value = ID_AA64SMFR0_EL1_FA64_IMP,
|
||||
.matches = has_cpuid_feature,
|
||||
.cpu_enable = fa64_kernel_enable,
|
||||
},
|
||||
@ -2573,10 +2573,10 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
.type = ARM64_CPUCAP_SYSTEM_FEATURE,
|
||||
.sys_reg = SYS_ID_AA64ISAR2_EL1,
|
||||
.sign = FTR_UNSIGNED,
|
||||
.field_pos = ID_AA64ISAR2_WFXT_SHIFT,
|
||||
.field_pos = ID_AA64ISAR2_EL1_WFxT_SHIFT,
|
||||
.field_width = 4,
|
||||
.matches = has_cpuid_feature,
|
||||
.min_field_value = ID_AA64ISAR2_WFXT_SUPPORTED,
|
||||
.min_field_value = ID_AA64ISAR2_EL1_WFxT_IMP,
|
||||
},
|
||||
{},
|
||||
};
|
||||
@ -2617,33 +2617,33 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
|
||||
#ifdef CONFIG_ARM64_PTR_AUTH
|
||||
static const struct arm64_cpu_capabilities ptr_auth_hwcap_addr_matches[] = {
|
||||
{
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_APA_SHIFT,
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_APA_SHIFT,
|
||||
4, FTR_UNSIGNED,
|
||||
ID_AA64ISAR1_APA_ARCHITECTED)
|
||||
ID_AA64ISAR1_EL1_APA_PAuth)
|
||||
},
|
||||
{
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_APA3_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR2_APA3_ARCHITECTED)
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_EL1_APA3_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR2_EL1_APA3_PAuth)
|
||||
},
|
||||
{
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_API_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR1_API_IMP_DEF)
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_API_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR1_EL1_API_PAuth)
|
||||
},
|
||||
{},
|
||||
};
|
||||
|
||||
static const struct arm64_cpu_capabilities ptr_auth_hwcap_gen_matches[] = {
|
||||
{
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_GPA_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR1_GPA_ARCHITECTED)
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_GPA_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR1_EL1_GPA_IMP)
|
||||
},
|
||||
{
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_GPA3_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR2_GPA3_ARCHITECTED)
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_EL1_GPA3_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR2_EL1_GPA3_IMP)
|
||||
},
|
||||
{
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_GPI_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR1_GPI_IMP_DEF)
|
||||
HWCAP_CPUID_MATCH(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_GPI_SHIFT,
|
||||
4, FTR_UNSIGNED, ID_AA64ISAR1_EL1_GPI_IMP)
|
||||
},
|
||||
{},
|
||||
};
|
||||
@ -2671,30 +2671,30 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
|
||||
HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_ASIMD_SHIFT, 4, FTR_SIGNED, 0, CAP_HWCAP, KERNEL_HWCAP_ASIMD),
|
||||
HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_ASIMD_SHIFT, 4, FTR_SIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ASIMDHP),
|
||||
HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_DIT_SHIFT, 4, FTR_SIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_DIT),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_DPB_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_DCPOP),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_DPB_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_DCPODP),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_JSCVT_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_JSCVT),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_FCMA_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_FCMA),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_LRCPC_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_LRCPC),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_LRCPC_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_ILRCPC),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_FRINTTS_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_FRINT),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_SB_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SB),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_BF16_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_BF16),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_DGH_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_DGH),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_I8MM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_I8MM),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_DPB_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_DCPOP),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_DPB_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_DCPODP),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_JSCVT_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_JSCVT),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_FCMA_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_FCMA),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_LRCPC_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_LRCPC),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_LRCPC_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_ILRCPC),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_FRINTTS_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_FRINT),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_SB_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SB),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_BF16_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_BF16),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_DGH_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_DGH),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_I8MM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_I8MM),
|
||||
HWCAP_CAP(SYS_ID_AA64MMFR2_EL1, ID_AA64MMFR2_AT_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_USCAT),
|
||||
#ifdef CONFIG_ARM64_SVE
|
||||
HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_SVE_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR0_SVE, CAP_HWCAP, KERNEL_HWCAP_SVE),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_SVEVER_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_SVEVER_SVE2, CAP_HWCAP, KERNEL_HWCAP_SVE2),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_AES_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_AES, CAP_HWCAP, KERNEL_HWCAP_SVEAES),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_AES_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_AES_PMULL, CAP_HWCAP, KERNEL_HWCAP_SVEPMULL),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_BITPERM_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_BITPERM, CAP_HWCAP, KERNEL_HWCAP_SVEBITPERM),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_BF16_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_BF16, CAP_HWCAP, KERNEL_HWCAP_SVEBF16),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_SHA3_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_SHA3, CAP_HWCAP, KERNEL_HWCAP_SVESHA3),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_SM4_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_SM4, CAP_HWCAP, KERNEL_HWCAP_SVESM4),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_I8MM_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_I8MM, CAP_HWCAP, KERNEL_HWCAP_SVEI8MM),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_F32MM_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_F32MM, CAP_HWCAP, KERNEL_HWCAP_SVEF32MM),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_F64MM_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_F64MM, CAP_HWCAP, KERNEL_HWCAP_SVEF64MM),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_SVEver_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_SVEver_SVE2, CAP_HWCAP, KERNEL_HWCAP_SVE2),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_AES_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_AES_IMP, CAP_HWCAP, KERNEL_HWCAP_SVEAES),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_AES_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_AES_PMULL128, CAP_HWCAP, KERNEL_HWCAP_SVEPMULL),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_BitPerm_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_BitPerm_IMP, CAP_HWCAP, KERNEL_HWCAP_SVEBITPERM),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_BF16_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_BF16_IMP, CAP_HWCAP, KERNEL_HWCAP_SVEBF16),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_SHA3_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_SHA3_IMP, CAP_HWCAP, KERNEL_HWCAP_SVESHA3),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_SM4_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_SM4_IMP, CAP_HWCAP, KERNEL_HWCAP_SVESM4),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_I8MM_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_I8MM_IMP, CAP_HWCAP, KERNEL_HWCAP_SVEI8MM),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_F32MM_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_F32MM_IMP, CAP_HWCAP, KERNEL_HWCAP_SVEF32MM),
|
||||
HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_F64MM_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_F64MM_IMP, CAP_HWCAP, KERNEL_HWCAP_SVEF64MM),
|
||||
#endif
|
||||
HWCAP_CAP(SYS_ID_AA64PFR1_EL1, ID_AA64PFR1_SSBS_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR1_SSBS_PSTATE_INSNS, CAP_HWCAP, KERNEL_HWCAP_SSBS),
|
||||
#ifdef CONFIG_ARM64_BTI
|
||||
@ -2710,17 +2710,17 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
|
||||
#endif /* CONFIG_ARM64_MTE */
|
||||
HWCAP_CAP(SYS_ID_AA64MMFR0_EL1, ID_AA64MMFR0_ECV_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ECV),
|
||||
HWCAP_CAP(SYS_ID_AA64MMFR1_EL1, ID_AA64MMFR1_AFP_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_AFP),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_RPRES_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_RPRES),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_WFXT_SHIFT, 4, FTR_UNSIGNED, ID_AA64ISAR2_WFXT_SUPPORTED, CAP_HWCAP, KERNEL_HWCAP_WFXT),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_EL1_RPRES_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_RPRES),
|
||||
HWCAP_CAP(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_EL1_WFxT_SHIFT, 4, FTR_UNSIGNED, ID_AA64ISAR2_EL1_WFxT_IMP, CAP_HWCAP, KERNEL_HWCAP_WFXT),
|
||||
#ifdef CONFIG_ARM64_SME
|
||||
HWCAP_CAP(SYS_ID_AA64PFR1_EL1, ID_AA64PFR1_SME_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR1_SME, CAP_HWCAP, KERNEL_HWCAP_SME),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_FA64, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I16I64, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F64F64, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I8I32, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F16F32, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_B16F32, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F32F32, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_FA64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I16I64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F64F64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I8I32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_B16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
|
||||
HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F32F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
|
||||
#endif /* CONFIG_ARM64_SME */
|
||||
{},
|
||||
};
|
||||
|
@ -33,12 +33,19 @@
|
||||
DEFINE_PER_CPU(struct cpuinfo_arm64, cpu_data);
|
||||
static struct cpuinfo_arm64 boot_cpu_data;
|
||||
|
||||
static const char *icache_policy_str[] = {
|
||||
[ICACHE_POLICY_VPIPT] = "VPIPT",
|
||||
[ICACHE_POLICY_RESERVED] = "RESERVED/UNKNOWN",
|
||||
[ICACHE_POLICY_VIPT] = "VIPT",
|
||||
[ICACHE_POLICY_PIPT] = "PIPT",
|
||||
};
|
||||
static inline const char *icache_policy_str(int l1ip)
|
||||
{
|
||||
switch (l1ip) {
|
||||
case CTR_EL0_L1Ip_VPIPT:
|
||||
return "VPIPT";
|
||||
case CTR_EL0_L1Ip_VIPT:
|
||||
return "VIPT";
|
||||
case CTR_EL0_L1Ip_PIPT:
|
||||
return "PIPT";
|
||||
default:
|
||||
return "RESERVED/UNKNOWN";
|
||||
}
|
||||
}
|
||||
|
||||
unsigned long __icache_flags;
|
||||
|
||||
@ -355,19 +362,19 @@ static void cpuinfo_detect_icache_policy(struct cpuinfo_arm64 *info)
|
||||
u32 l1ip = CTR_L1IP(info->reg_ctr);
|
||||
|
||||
switch (l1ip) {
|
||||
case ICACHE_POLICY_PIPT:
|
||||
case CTR_EL0_L1Ip_PIPT:
|
||||
break;
|
||||
case ICACHE_POLICY_VPIPT:
|
||||
case CTR_EL0_L1Ip_VPIPT:
|
||||
set_bit(ICACHEF_VPIPT, &__icache_flags);
|
||||
break;
|
||||
case ICACHE_POLICY_RESERVED:
|
||||
case ICACHE_POLICY_VIPT:
|
||||
case CTR_EL0_L1Ip_VIPT:
|
||||
default:
|
||||
/* Assume aliasing */
|
||||
set_bit(ICACHEF_ALIASING, &__icache_flags);
|
||||
break;
|
||||
}
|
||||
|
||||
pr_info("Detected %s I-cache on CPU%d\n", icache_policy_str[l1ip], cpu);
|
||||
pr_info("Detected %s I-cache on CPU%d\n", icache_policy_str(l1ip), cpu);
|
||||
}
|
||||
|
||||
static void __cpuinfo_store_cpu_32bit(struct cpuinfo_32bit *info)
|
||||
|
@ -53,7 +53,7 @@ static const struct ftr_set_desc pfr1 __initconst = {
|
||||
.name = "id_aa64pfr1",
|
||||
.override = &id_aa64pfr1_override,
|
||||
.fields = {
|
||||
{ "bt", ID_AA64PFR1_BT_SHIFT },
|
||||
{ "bt", ID_AA64PFR1_BT_SHIFT },
|
||||
{ "mte", ID_AA64PFR1_MTE_SHIFT},
|
||||
{}
|
||||
},
|
||||
@ -63,10 +63,10 @@ static const struct ftr_set_desc isar1 __initconst = {
|
||||
.name = "id_aa64isar1",
|
||||
.override = &id_aa64isar1_override,
|
||||
.fields = {
|
||||
{ "gpi", ID_AA64ISAR1_GPI_SHIFT },
|
||||
{ "gpa", ID_AA64ISAR1_GPA_SHIFT },
|
||||
{ "api", ID_AA64ISAR1_API_SHIFT },
|
||||
{ "apa", ID_AA64ISAR1_APA_SHIFT },
|
||||
{ "gpi", ID_AA64ISAR1_EL1_GPI_SHIFT },
|
||||
{ "gpa", ID_AA64ISAR1_EL1_GPA_SHIFT },
|
||||
{ "api", ID_AA64ISAR1_EL1_API_SHIFT },
|
||||
{ "apa", ID_AA64ISAR1_EL1_APA_SHIFT },
|
||||
{}
|
||||
},
|
||||
};
|
||||
@ -75,8 +75,8 @@ static const struct ftr_set_desc isar2 __initconst = {
|
||||
.name = "id_aa64isar2",
|
||||
.override = &id_aa64isar2_override,
|
||||
.fields = {
|
||||
{ "gpa3", ID_AA64ISAR2_GPA3_SHIFT },
|
||||
{ "apa3", ID_AA64ISAR2_APA3_SHIFT },
|
||||
{ "gpa3", ID_AA64ISAR2_EL1_GPA3_SHIFT },
|
||||
{ "apa3", ID_AA64ISAR2_EL1_APA3_SHIFT },
|
||||
{}
|
||||
},
|
||||
};
|
||||
|
@ -579,11 +579,11 @@ static void ctr_read_handler(unsigned long esr, struct pt_regs *regs)
|
||||
|
||||
if (cpus_have_const_cap(ARM64_WORKAROUND_1542419)) {
|
||||
/* Hide DIC so that we can trap the unnecessary maintenance...*/
|
||||
val &= ~BIT(CTR_DIC_SHIFT);
|
||||
val &= ~BIT(CTR_EL0_DIC_SHIFT);
|
||||
|
||||
/* ... and fake IminLine to reduce the number of traps. */
|
||||
val &= ~CTR_IMINLINE_MASK;
|
||||
val |= (PAGE_SHIFT - 2) & CTR_IMINLINE_MASK;
|
||||
val &= ~CTR_EL0_IminLine_MASK;
|
||||
val |= (PAGE_SHIFT - 2) & CTR_EL0_IminLine_MASK;
|
||||
}
|
||||
|
||||
pt_regs_write_reg(regs, rt, val);
|
||||
|
@ -176,25 +176,25 @@
|
||||
)
|
||||
|
||||
#define PVM_ID_AA64ISAR1_ALLOW (\
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_DPB) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_APA) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_API) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_JSCVT) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_FCMA) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_LRCPC) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_GPA) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_GPI) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_FRINTTS) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_SB) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_SPECRES) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_BF16) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_DGH) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_I8MM) \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_DPB) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_APA) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_API) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_JSCVT) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_FCMA) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_LRCPC) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_GPA) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_GPI) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_FRINTTS) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_SB) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_SPECRES) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_BF16) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_DGH) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_I8MM) \
|
||||
)
|
||||
|
||||
#define PVM_ID_AA64ISAR2_ALLOW (\
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_GPA3) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_APA3) \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_GPA3) | \
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_APA3) \
|
||||
)
|
||||
|
||||
u64 pvm_read_id_reg(const struct kvm_vcpu *vcpu, u32 id);
|
||||
|
@ -173,10 +173,10 @@ static u64 get_pvm_id_aa64isar1(const struct kvm_vcpu *vcpu)
|
||||
u64 allow_mask = PVM_ID_AA64ISAR1_ALLOW;
|
||||
|
||||
if (!vcpu_has_ptrauth(vcpu))
|
||||
allow_mask &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR1_APA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_API) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_GPA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_GPI));
|
||||
allow_mask &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_APA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_API) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_GPA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_GPI));
|
||||
|
||||
return id_aa64isar1_el1_sys_val & allow_mask;
|
||||
}
|
||||
@ -186,8 +186,8 @@ static u64 get_pvm_id_aa64isar2(const struct kvm_vcpu *vcpu)
|
||||
u64 allow_mask = PVM_ID_AA64ISAR2_ALLOW;
|
||||
|
||||
if (!vcpu_has_ptrauth(vcpu))
|
||||
allow_mask &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR2_APA3) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_GPA3));
|
||||
allow_mask &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_APA3) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_GPA3));
|
||||
|
||||
return id_aa64isar2_el1_sys_val & allow_mask;
|
||||
}
|
||||
|
@ -1136,17 +1136,17 @@ static u64 read_id_reg(const struct kvm_vcpu *vcpu,
|
||||
break;
|
||||
case SYS_ID_AA64ISAR1_EL1:
|
||||
if (!vcpu_has_ptrauth(vcpu))
|
||||
val &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR1_APA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_API) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_GPA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_GPI));
|
||||
val &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_APA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_API) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_GPA) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_GPI));
|
||||
break;
|
||||
case SYS_ID_AA64ISAR2_EL1:
|
||||
if (!vcpu_has_ptrauth(vcpu))
|
||||
val &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR2_APA3) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_GPA3));
|
||||
val &= ~(ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_APA3) |
|
||||
ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_GPA3));
|
||||
if (!cpus_have_final_cap(ARM64_HAS_WFXT))
|
||||
val &= ~ARM64_FEATURE_MASK(ID_AA64ISAR2_WFXT);
|
||||
val &= ~ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_WFxT);
|
||||
break;
|
||||
case SYS_ID_AA64DFR0_EL1:
|
||||
/* Limit debug to ARMv8.0 */
|
||||
|
@ -18,7 +18,7 @@
|
||||
*/
|
||||
.macro multitag_transfer_size, reg, tmp
|
||||
mrs_s \reg, SYS_GMID_EL1
|
||||
ubfx \reg, \reg, #SYS_GMID_EL1_BS_SHIFT, #SYS_GMID_EL1_BS_SIZE
|
||||
ubfx \reg, \reg, #GMID_EL1_BS_SHIFT, #GMID_EL1_BS_SIZE
|
||||
mov \tmp, #4
|
||||
lsl \reg, \tmp, \reg
|
||||
.endm
|
||||
|
@ -88,7 +88,7 @@ END {
|
||||
|
||||
# skip blank lines and comment lines
|
||||
/^$/ { next }
|
||||
/^#/ { next }
|
||||
/^[\t ]*#/ { next }
|
||||
|
||||
/^SysregFields/ {
|
||||
change_block("SysregFields", "None", "SysregFields")
|
||||
|
@ -46,6 +46,89 @@
|
||||
# feature that introduces them (eg, FEAT_LS64_ACCDATA introduces enumeration
|
||||
# item ACCDATA) though it may be more tasteful to do something else.
|
||||
|
||||
Sysreg ID_AA64ZFR0_EL1 3 0 0 4 4
|
||||
Res0 63:60
|
||||
Enum 59:56 F64MM
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 55:52 F32MM
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Res0 51:48
|
||||
Enum 47:44 I8MM
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 43:40 SM4
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Res0 39:36
|
||||
Enum 35:32 SHA3
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Res0 31:24
|
||||
Enum 23:20 BF16
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
0b0010 EBF16
|
||||
EndEnum
|
||||
Enum 19:16 BitPerm
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Res0 15:8
|
||||
Enum 7:4 AES
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
0b0010 PMULL128
|
||||
EndEnum
|
||||
Enum 3:0 SVEver
|
||||
0b0000 IMP
|
||||
0b0001 SVE2
|
||||
EndEnum
|
||||
EndSysreg
|
||||
|
||||
Sysreg ID_AA64SMFR0_EL1 3 0 0 4 5
|
||||
Enum 63 FA64
|
||||
0b0 NI
|
||||
0b1 IMP
|
||||
EndEnum
|
||||
Res0 62:60
|
||||
Field 59:56 SMEver
|
||||
Enum 55:52 I16I64
|
||||
0b0000 NI
|
||||
0b1111 IMP
|
||||
EndEnum
|
||||
Res0 51:49
|
||||
Enum 48 F64F64
|
||||
0b0 NI
|
||||
0b1 IMP
|
||||
EndEnum
|
||||
Res0 47:40
|
||||
Enum 39:36 I8I32
|
||||
0b0000 NI
|
||||
0b1111 IMP
|
||||
EndEnum
|
||||
Enum 35 F16F32
|
||||
0b0 NI
|
||||
0b1 IMP
|
||||
EndEnum
|
||||
Enum 34 B16F32
|
||||
0b0 NI
|
||||
0b1 IMP
|
||||
EndEnum
|
||||
Res0 33
|
||||
Enum 32 F32F32
|
||||
0b0 NI
|
||||
0b1 IMP
|
||||
EndEnum
|
||||
Res0 31:0
|
||||
EndSysreg
|
||||
|
||||
Sysreg ID_AA64ISAR0_EL1 3 0 0 6 0
|
||||
Enum 63:60 RNDR
|
||||
0b0000 NI
|
||||
@ -114,6 +197,122 @@ EndEnum
|
||||
Res0 3:0
|
||||
EndSysreg
|
||||
|
||||
Sysreg ID_AA64ISAR1_EL1 3 0 0 6 1
|
||||
Enum 63:60 LS64
|
||||
0b0000 NI
|
||||
0b0001 LS64
|
||||
0b0010 LS64_V
|
||||
0b0011 LS64_ACCDATA
|
||||
EndEnum
|
||||
Enum 59:56 XS
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 55:52 I8MM
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 51:48 DGH
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 47:44 BF16
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
0b0010 EBF16
|
||||
EndEnum
|
||||
Enum 43:40 SPECRES
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 39:36 SB
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 35:32 FRINTTS
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 31:28 GPI
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 27:24 GPA
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 23:20 LRCPC
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
0b0010 LRCPC2
|
||||
EndEnum
|
||||
Enum 19:16 FCMA
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 15:12 JSCVT
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 11:8 API
|
||||
0b0000 NI
|
||||
0b0001 PAuth
|
||||
0b0010 EPAC
|
||||
0b0011 PAuth2
|
||||
0b0100 FPAC
|
||||
0b0101 FPACCOMBINE
|
||||
EndEnum
|
||||
Enum 7:4 APA
|
||||
0b0000 NI
|
||||
0b0001 PAuth
|
||||
0b0010 EPAC
|
||||
0b0011 PAuth2
|
||||
0b0100 FPAC
|
||||
0b0101 FPACCOMBINE
|
||||
EndEnum
|
||||
Enum 3:0 DPB
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
0b0010 DPB2
|
||||
EndEnum
|
||||
EndSysreg
|
||||
|
||||
Sysreg ID_AA64ISAR2_EL1 3 0 0 6 2
|
||||
Res0 63:28
|
||||
Enum 27:24 PAC_frac
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 23:20 BC
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 19:16 MOPS
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 15:12 APA3
|
||||
0b0000 NI
|
||||
0b0001 PAuth
|
||||
0b0010 EPAC
|
||||
0b0011 PAuth2
|
||||
0b0100 FPAC
|
||||
0b0101 FPACCOMBINE
|
||||
EndEnum
|
||||
Enum 11:8 GPA3
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 7:4 RPRES
|
||||
0b0000 NI
|
||||
0b0001 IMP
|
||||
EndEnum
|
||||
Enum 3:0 WFxT
|
||||
0b0000 NI
|
||||
0b0010 IMP
|
||||
EndEnum
|
||||
EndSysreg
|
||||
|
||||
Sysreg SCTLR_EL1 3 0 1 0 0
|
||||
Field 63 TIDCP
|
||||
Field 62 SPINMASK
|
||||
@ -257,6 +456,11 @@ Field 5:3 Ctype2
|
||||
Field 2:0 Ctype1
|
||||
EndSysreg
|
||||
|
||||
Sysreg GMID_EL1 3 1 0 0 4
|
||||
Res0 63:4
|
||||
Field 3:0 BS
|
||||
EndSysreg
|
||||
|
||||
Sysreg SMIDR_EL1 3 1 0 0 6
|
||||
Res0 63:32
|
||||
Field 31:24 IMPLEMENTER
|
||||
@ -273,6 +477,33 @@ Field 3:1 Level
|
||||
Field 0 InD
|
||||
EndSysreg
|
||||
|
||||
Sysreg CTR_EL0 3 3 0 0 1
|
||||
Res0 63:38
|
||||
Field 37:32 TminLine
|
||||
Res1 31
|
||||
Res0 30
|
||||
Field 29 DIC
|
||||
Field 28 IDC
|
||||
Field 27:24 CWG
|
||||
Field 23:20 ERG
|
||||
Field 19:16 DminLine
|
||||
Enum 15:14 L1Ip
|
||||
0b00 VPIPT
|
||||
# This is named as AIVIVT in the ARM but documented as reserved
|
||||
0b01 RESERVED
|
||||
0b10 VIPT
|
||||
0b11 PIPT
|
||||
EndEnum
|
||||
Res0 13:4
|
||||
Field 3:0 IminLine
|
||||
EndSysreg
|
||||
|
||||
Sysreg DCZID_EL0 3 3 0 0 7
|
||||
Res0 63:5
|
||||
Field 4 DZP
|
||||
Field 3:0 BS
|
||||
EndSysreg
|
||||
|
||||
Sysreg SVCR 3 3 4 2 2
|
||||
Res0 63:2
|
||||
Field 1 ZA
|
||||
@ -367,3 +598,36 @@ EndSysreg
|
||||
Sysreg TTBR1_EL1 3 0 2 0 1
|
||||
Fields TTBRx_EL1
|
||||
EndSysreg
|
||||
|
||||
Sysreg LORSA_EL1 3 0 10 4 0
|
||||
Res0 63:52
|
||||
Field 51:16 SA
|
||||
Res0 15:1
|
||||
Field 0 Valid
|
||||
EndSysreg
|
||||
|
||||
Sysreg LOREA_EL1 3 0 10 4 1
|
||||
Res0 63:52
|
||||
Field 51:48 EA_51_48
|
||||
Field 47:16 EA_47_16
|
||||
Res0 15:0
|
||||
EndSysreg
|
||||
|
||||
Sysreg LORN_EL1 3 0 10 4 2
|
||||
Res0 63:8
|
||||
Field 7:0 Num
|
||||
EndSysreg
|
||||
|
||||
Sysreg LORC_EL1 3 0 10 4 3
|
||||
Res0 63:10
|
||||
Field 9:2 DS
|
||||
Res0 1
|
||||
Field 0 EN
|
||||
EndSysreg
|
||||
|
||||
Sysreg LORID_EL1 3 0 10 4 7
|
||||
Res0 63:24
|
||||
Field 23:16 LD
|
||||
Res0 15:8
|
||||
Field 7:0 LR
|
||||
EndSysreg
|