/* NVKM MMU subdev: virtual-memory management interface (VM, VMA, page tables). */
#ifndef __NVKM_MMU_H__
#define __NVKM_MMU_H__

#include <core/subdev.h>
#include <core/mm.h>

/* Forward declarations: only pointers to these are used in this header. */
struct nvkm_gpuobj;
struct nvkm_mem;
struct nvkm_vm_pgt {
|
2015-08-20 14:54:17 +10:00
|
|
|
struct nvkm_memory *mem[2];
|
2010-12-15 11:04:39 +10:00
|
|
|
u32 refcount[2];
|
2010-08-27 10:00:25 +10:00
|
|
|
};
|
|
|
|
|
2015-01-14 15:09:19 +10:00
|
|
|
struct nvkm_vm_pgd {
|
2010-08-27 10:00:25 +10:00
|
|
|
struct list_head head;
|
2015-01-14 15:09:19 +10:00
|
|
|
struct nvkm_gpuobj *obj;
|
2010-08-27 10:00:25 +10:00
|
|
|
};
|
2012-07-14 19:09:17 +10:00
|
|
|
|
2015-01-14 15:09:19 +10:00
|
|
|
struct nvkm_vma {
|
|
|
|
struct nvkm_vm *vm;
|
|
|
|
struct nvkm_mm_node *node;
|
2017-11-01 03:56:19 +10:00
|
|
|
union {
|
|
|
|
u64 offset;
|
|
|
|
u64 addr;
|
|
|
|
};
|
2010-08-27 10:00:25 +10:00
|
|
|
u32 access;
|
|
|
|
};
|
|
|
|
|
2015-01-14 15:09:19 +10:00
|
|
|
struct nvkm_vm {
|
|
|
|
struct nvkm_mmu *mmu;
|
2015-08-20 14:54:17 +10:00
|
|
|
|
|
|
|
struct mutex mutex;
|
2015-01-14 15:09:19 +10:00
|
|
|
struct nvkm_mm mm;
|
2013-07-30 11:47:47 +10:00
|
|
|
struct kref refcount;
|
2010-08-27 10:00:25 +10:00
|
|
|
|
|
|
|
struct list_head pgd_list;
|
2015-08-20 14:54:22 +10:00
|
|
|
atomic_t engref[NVKM_SUBDEV_NR];
|
2010-08-27 10:00:25 +10:00
|
|
|
|
2015-01-14 15:09:19 +10:00
|
|
|
struct nvkm_vm_pgt *pgt;
|
2010-08-27 10:00:25 +10:00
|
|
|
u32 fpde;
|
|
|
|
u32 lpde;
|
2012-07-14 19:09:17 +10:00
|
|
|
};
|
|
|
|
|
2015-01-14 15:09:19 +10:00
|
|
|
int nvkm_vm_new(struct nvkm_device *, u64 offset, u64 length, u64 mm_offset,
|
2015-08-20 14:54:17 +10:00
|
|
|
struct lock_class_key *, struct nvkm_vm **);
|
2015-01-14 15:09:19 +10:00
|
|
|
int nvkm_vm_ref(struct nvkm_vm *, struct nvkm_vm **, struct nvkm_gpuobj *pgd);
|
2015-08-20 14:54:17 +10:00
|
|
|
int nvkm_vm_boot(struct nvkm_vm *, u64 size);
|
2015-01-14 15:09:19 +10:00
|
|
|
int nvkm_vm_get(struct nvkm_vm *, u64 size, u32 page_shift, u32 access,
|
|
|
|
struct nvkm_vma *);
|
|
|
|
void nvkm_vm_put(struct nvkm_vma *);
|
|
|
|
void nvkm_vm_map(struct nvkm_vma *, struct nvkm_mem *);
|
|
|
|
void nvkm_vm_map_at(struct nvkm_vma *, u64 offset, struct nvkm_mem *);
|
|
|
|
void nvkm_vm_unmap(struct nvkm_vma *);
|
|
|
|
void nvkm_vm_unmap_at(struct nvkm_vma *, u64 offset, u64 length);
|
2015-08-20 14:54:21 +10:00
|
|
|
|
|
|
|
struct nvkm_mmu {
|
|
|
|
const struct nvkm_mmu_func *func;
|
|
|
|
struct nvkm_subdev subdev;
|
|
|
|
|
|
|
|
u64 limit;
|
|
|
|
u8 dma_bits;
|
|
|
|
u8 lpg_shift;
|
2017-11-01 03:56:19 +10:00
|
|
|
|
|
|
|
struct nvkm_vmm *vmm;
|
2015-08-20 14:54:21 +10:00
|
|
|
};
|
|
|
|
|
|
|
|
/* Per-chipset constructors: (device, subdev index, out mmu). */
int nv04_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int nv41_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int nv44_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int nv50_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int g84_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gf100_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gk104_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
#endif