iommu/arm-smmu-v3: Build the whole STE in arm_smmu_make_s2_domain_ste()

Half the code was living in arm_smmu_domain_finalise_s2(); just move it
here and take the values directly from the pgtbl_ops instead of storing
copies in the S2 config.

Reviewed-by: Michael Shavit <mshavit@google.com>
Reviewed-by: Nicolin Chen <nicolinc@nvidia.com>
Reviewed-by: Mostafa Saleh <smostafa@google.com>
Tested-by: Shameer Kolothum <shameerali.kolothum.thodi@huawei.com>
Tested-by: Nicolin Chen <nicolinc@nvidia.com>
Tested-by: Moritz Fischer <moritzf@google.com>
Signed-off-by: Jason Gunthorpe <jgg@nvidia.com>
Link: https://lore.kernel.org/r/4-v6-96275f25c39d+2d4-smmuv3_newapi_p1_jgg@nvidia.com
Signed-off-by: Will Deacon <will@kernel.org>
Jason Gunthorpe, 2024-02-26 13:07:15 -04:00; committed by Will Deacon
parent efe15df087
commit 71b0aa10b1
2 changed files with 15 additions and 14 deletions

--- a/drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
+++ b/drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
@@ -1520,6 +1520,11 @@ static void arm_smmu_make_s2_domain_ste(struct arm_smmu_ste *target,
                                         struct arm_smmu_domain *smmu_domain)
 {
         struct arm_smmu_s2_cfg *s2_cfg = &smmu_domain->s2_cfg;
+        const struct io_pgtable_cfg *pgtbl_cfg =
+                &io_pgtable_ops_to_pgtable(smmu_domain->pgtbl_ops)->cfg;
+        typeof(&pgtbl_cfg->arm_lpae_s2_cfg.vtcr) vtcr =
+                &pgtbl_cfg->arm_lpae_s2_cfg.vtcr;
+        u64 vtcr_val;
 
         memset(target, 0, sizeof(*target));
         target->data[0] = cpu_to_le64(
@@ -1532,9 +1537,16 @@ static void arm_smmu_make_s2_domain_ste(struct arm_smmu_ste *target,
                 FIELD_PREP(STRTAB_STE_1_SHCFG,
                            STRTAB_STE_1_SHCFG_INCOMING));
 
+        vtcr_val = FIELD_PREP(STRTAB_STE_2_VTCR_S2T0SZ, vtcr->tsz) |
+                   FIELD_PREP(STRTAB_STE_2_VTCR_S2SL0, vtcr->sl) |
+                   FIELD_PREP(STRTAB_STE_2_VTCR_S2IR0, vtcr->irgn) |
+                   FIELD_PREP(STRTAB_STE_2_VTCR_S2OR0, vtcr->orgn) |
+                   FIELD_PREP(STRTAB_STE_2_VTCR_S2SH0, vtcr->sh) |
+                   FIELD_PREP(STRTAB_STE_2_VTCR_S2TG, vtcr->tg) |
+                   FIELD_PREP(STRTAB_STE_2_VTCR_S2PS, vtcr->ps);
         target->data[2] = cpu_to_le64(
                 FIELD_PREP(STRTAB_STE_2_S2VMID, s2_cfg->vmid) |
-                FIELD_PREP(STRTAB_STE_2_VTCR, s2_cfg->vtcr) |
+                FIELD_PREP(STRTAB_STE_2_VTCR, vtcr_val) |
                 STRTAB_STE_2_S2AA64 |
 #ifdef __BIG_ENDIAN
                 STRTAB_STE_2_S2ENDI |
@@ -1542,7 +1554,8 @@ static void arm_smmu_make_s2_domain_ste(struct arm_smmu_ste *target,
                 STRTAB_STE_2_S2PTW |
                 STRTAB_STE_2_S2R);
 
-        target->data[3] = cpu_to_le64(s2_cfg->vttbr & STRTAB_STE_3_S2TTB_MASK);
+        target->data[3] = cpu_to_le64(pgtbl_cfg->arm_lpae_s2_cfg.vttbr &
+                                      STRTAB_STE_3_S2TTB_MASK);
 }
 
 static void arm_smmu_write_strtab_ent(struct arm_smmu_master *master, u32 sid,
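
For reference, this is roughly how arm_smmu_make_s2_domain_ste() reads with the
hunks above applied. It is a sketch reconstructed from those hunks: the
data[0]/data[1] setup between the hunks is untouched by this patch and is
elided, and indentation is approximated.

static void arm_smmu_make_s2_domain_ste(struct arm_smmu_ste *target,
                                        struct arm_smmu_domain *smmu_domain)
{
        struct arm_smmu_s2_cfg *s2_cfg = &smmu_domain->s2_cfg;
        /* Pull the stage-2 translation parameters straight from the io-pgtable */
        const struct io_pgtable_cfg *pgtbl_cfg =
                &io_pgtable_ops_to_pgtable(smmu_domain->pgtbl_ops)->cfg;
        typeof(&pgtbl_cfg->arm_lpae_s2_cfg.vtcr) vtcr =
                &pgtbl_cfg->arm_lpae_s2_cfg.vtcr;
        u64 vtcr_val;

        memset(target, 0, sizeof(*target));
        /* ... data[0]/data[1] setup, unchanged by this patch, elided ... */

        /* Encode the STE VTCR field from the io-pgtable's VTCR values */
        vtcr_val = FIELD_PREP(STRTAB_STE_2_VTCR_S2T0SZ, vtcr->tsz) |
                   FIELD_PREP(STRTAB_STE_2_VTCR_S2SL0, vtcr->sl) |
                   FIELD_PREP(STRTAB_STE_2_VTCR_S2IR0, vtcr->irgn) |
                   FIELD_PREP(STRTAB_STE_2_VTCR_S2OR0, vtcr->orgn) |
                   FIELD_PREP(STRTAB_STE_2_VTCR_S2SH0, vtcr->sh) |
                   FIELD_PREP(STRTAB_STE_2_VTCR_S2TG, vtcr->tg) |
                   FIELD_PREP(STRTAB_STE_2_VTCR_S2PS, vtcr->ps);
        target->data[2] = cpu_to_le64(
                FIELD_PREP(STRTAB_STE_2_S2VMID, s2_cfg->vmid) |
                FIELD_PREP(STRTAB_STE_2_VTCR, vtcr_val) |
                STRTAB_STE_2_S2AA64 |
#ifdef __BIG_ENDIAN
                STRTAB_STE_2_S2ENDI |
#endif
                STRTAB_STE_2_S2PTW |
                STRTAB_STE_2_S2R);

        /* VTTBR also comes from the io-pgtable rather than a cached s2_cfg copy */
        target->data[3] = cpu_to_le64(pgtbl_cfg->arm_lpae_s2_cfg.vttbr &
                                      STRTAB_STE_3_S2TTB_MASK);
}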
@@ -2302,7 +2315,6 @@ static int arm_smmu_domain_finalise_s2(struct arm_smmu_domain *smmu_domain,
         int vmid;
         struct arm_smmu_device *smmu = smmu_domain->smmu;
         struct arm_smmu_s2_cfg *cfg = &smmu_domain->s2_cfg;
-        typeof(&pgtbl_cfg->arm_lpae_s2_cfg.vtcr) vtcr;
 
         /* Reserve VMID 0 for stage-2 bypass STEs */
         vmid = ida_alloc_range(&smmu->vmid_map, 1, (1 << smmu->vmid_bits) - 1,
@@ -2310,16 +2322,7 @@ static int arm_smmu_domain_finalise_s2(struct arm_smmu_domain *smmu_domain,
         if (vmid < 0)
                 return vmid;
 
-        vtcr = &pgtbl_cfg->arm_lpae_s2_cfg.vtcr;
         cfg->vmid = (u16)vmid;
-        cfg->vttbr = pgtbl_cfg->arm_lpae_s2_cfg.vttbr;
-        cfg->vtcr = FIELD_PREP(STRTAB_STE_2_VTCR_S2T0SZ, vtcr->tsz) |
-                    FIELD_PREP(STRTAB_STE_2_VTCR_S2SL0, vtcr->sl) |
-                    FIELD_PREP(STRTAB_STE_2_VTCR_S2IR0, vtcr->irgn) |
-                    FIELD_PREP(STRTAB_STE_2_VTCR_S2OR0, vtcr->orgn) |
-                    FIELD_PREP(STRTAB_STE_2_VTCR_S2SH0, vtcr->sh) |
-                    FIELD_PREP(STRTAB_STE_2_VTCR_S2TG, vtcr->tg) |
-                    FIELD_PREP(STRTAB_STE_2_VTCR_S2PS, vtcr->ps);
 
         return 0;
 }
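
Similarly, a sketch of what remains of arm_smmu_domain_finalise_s2() after the
removals, pieced together from the hunks above; the second parameter of the
signature and the GFP_KERNEL continuation line are not visible in the hunks and
are assumed from the surrounding code.

static int arm_smmu_domain_finalise_s2(struct arm_smmu_domain *smmu_domain,
                                       struct io_pgtable_cfg *pgtbl_cfg)
{
        int vmid;
        struct arm_smmu_device *smmu = smmu_domain->smmu;
        struct arm_smmu_s2_cfg *cfg = &smmu_domain->s2_cfg;

        /* Reserve VMID 0 for stage-2 bypass STEs */
        vmid = ida_alloc_range(&smmu->vmid_map, 1, (1 << smmu->vmid_bits) - 1,
                               GFP_KERNEL);
        if (vmid < 0)
                return vmid;

        /* Only the VMID is cached now; VTTBR/VTCR are read when the STE is built */
        cfg->vmid = (u16)vmid;
        return 0;
}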

--- a/drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.h
+++ b/drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.h
@@ -609,8 +609,6 @@ struct arm_smmu_ctx_desc_cfg {
 
 struct arm_smmu_s2_cfg {
         u16                             vmid;
-        u64                             vttbr;
-        u64                             vtcr;
 };
 
 struct arm_smmu_strtab_cfg {
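
With vttbr and vtcr gone, the stage-2 configuration carries only the VMID, and
VTTBR/VTCR are read from the io_pgtable configuration at STE build time. A
sketch of the resulting definition (member alignment approximated):

struct arm_smmu_s2_cfg {
        u16                             vmid;
};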