/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2015-2018 Etnaviv Project
 */

#ifndef __ETNAVIV_MMU_H__
#define __ETNAVIV_MMU_H__

#define ETNAVIV_PROT_READ	(1 << 0)
#define ETNAVIV_PROT_WRITE	(1 << 1)

enum etnaviv_iommu_version {
	ETNAVIV_IOMMU_V1 = 0,
	ETNAVIV_IOMMU_V2,
};

struct etnaviv_gpu;
struct etnaviv_vram_mapping;
struct etnaviv_iommu_domain;
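/*
 * Backend callbacks for one GPU address space; each MMU generation
 * (MMUv1/MMUv2) supplies its own implementation of these page-table
 * operations.
 */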
struct etnaviv_iommu_domain_ops {
	void (*free)(struct etnaviv_iommu_domain *);
	int (*map)(struct etnaviv_iommu_domain *domain, unsigned long iova,
		   phys_addr_t paddr, size_t size, int prot);
	size_t (*unmap)(struct etnaviv_iommu_domain *domain, unsigned long iova,
			size_t size);
	size_t (*dump_size)(struct etnaviv_iommu_domain *);
	void (*dump)(struct etnaviv_iommu_domain *, void *);
};
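/*
 * Illustrative sketch (hypothetical names, not part of this header): a
 * backend would fill in an ops table like
 *
 *	static const struct etnaviv_iommu_domain_ops example_domain_ops = {
 *		.free      = example_domain_free,
 *		.map       = example_domain_map,
 *		.unmap     = example_domain_unmap,
 *		.dump_size = example_domain_dump_size,
 *		.dump      = example_domain_dump,
 *	};
 *
 * and point etnaviv_iommu_domain::ops at it.
 */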
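/*
 * State for one GPU address space: the address range it covers, the
 * backend ops that maintain its page tables, and a scratch "bad page"
 * (CPU pointer plus DMA address) for the backend's use.
 */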
struct etnaviv_iommu_domain {
	struct device *dev;
	void *bad_page_cpu;
	dma_addr_t bad_page_dma;
	u64 base;
	u64 size;

	const struct etnaviv_iommu_domain_ops *ops;
};
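/*
 * Per-GPU MMU context: wraps the active etnaviv_iommu_domain and a
 * drm_mm-based manager that hands out GPU virtual address ranges for
 * buffer mappings, serialized by @lock.
 */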
struct etnaviv_iommu {
	struct etnaviv_gpu *gpu;
	struct etnaviv_iommu_domain *domain;

	enum etnaviv_iommu_version version;

	/* memory manager for GPU address area */
	struct mutex lock;
	struct list_head mappings;
	struct drm_mm mm;
	unsigned int flush_seq;
};

struct etnaviv_gem_object;
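/*
 * GEM object mapping: etnaviv_iommu_map_gem() reserves a range in the GPU
 * address space for @etnaviv_obj and records the result in @mapping;
 * etnaviv_iommu_unmap_gem() tears that mapping down again.
 */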
int etnaviv_iommu_map_gem(struct etnaviv_iommu *mmu,
	struct etnaviv_gem_object *etnaviv_obj, u32 memory_base,
	struct etnaviv_vram_mapping *mapping);
void etnaviv_iommu_unmap_gem(struct etnaviv_iommu *mmu,
	struct etnaviv_vram_mapping *mapping);
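/*
 * Suballocation helpers: map a kernel-owned buffer at @paddr (e.g. the
 * command buffer suballocator) into the GPU address space and release it
 * again.
 */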
int etnaviv_iommu_get_suballoc_va(struct etnaviv_iommu *mmu,
				  struct etnaviv_vram_mapping *mapping,
				  u32 memory_base, dma_addr_t paddr,
				  size_t size);
void etnaviv_iommu_put_suballoc_va(struct etnaviv_iommu *mmu,
				   struct etnaviv_vram_mapping *mapping);
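/*
 * Page-table dump helpers, used when capturing GPU state (e.g. for
 * devcoredump): dump_size() reports the buffer size needed, dump() writes
 * the tables into @buf.
 */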
size_t etnaviv_iommu_dump_size(struct etnaviv_iommu *iommu);
void etnaviv_iommu_dump(struct etnaviv_iommu *iommu, void *buf);
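/*
 * Lifecycle: etnaviv_iommu_new() sets up the MMU context for @gpu,
 * etnaviv_iommu_destroy() releases it, and etnaviv_iommu_restore()
 * reprograms the hardware MMU state (e.g. after a GPU reset or resume).
 */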
struct etnaviv_iommu *etnaviv_iommu_new(struct etnaviv_gpu *gpu);
void etnaviv_iommu_destroy(struct etnaviv_iommu *iommu);
void etnaviv_iommu_restore(struct etnaviv_gpu *gpu);
#endif /* __ETNAVIV_MMU_H__ */