// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2014-2018 Etnaviv Project
 */

#include <drm/drm_prime.h>
#include <linux/dma-buf.h>
#include <linux/module.h>

#include "etnaviv_drv.h"
#include "etnaviv_gem.h"

MODULE_IMPORT_NS("DMA_BUF");

static struct lock_class_key etnaviv_prime_lock_class;
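
/*
 * Build an sg_table describing the object's backing pages for export.
 * The pages must already have been populated, i.e. the object pinned.
 */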
struct sg_table *etnaviv_gem_prime_get_sg_table(struct drm_gem_object *obj)
{
	struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
	unsigned int npages = obj->size >> PAGE_SHIFT;

	if (WARN_ON(!etnaviv_obj->pages)) /* should have already pinned! */
		return ERR_PTR(-EINVAL);

	return drm_prime_pages_to_sg(obj->dev, etnaviv_obj->pages, npages);
}
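
/* vmap callback: return the kernel mapping provided by etnaviv_gem_vmap(). */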
int etnaviv_gem_prime_vmap(struct drm_gem_object *obj, struct iosys_map *map)
{
	void *vaddr;

	vaddr = etnaviv_gem_vmap(obj);
	if (!vaddr)
		return -ENOMEM;
	iosys_map_set_vaddr(map, vaddr);

	return 0;
}
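
/*
 * Pin callback: make sure the backing pages of natively allocated objects
 * are populated. Imported buffers are backed by their exporter, so there is
 * nothing to pin for them.
 */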
int etnaviv_gem_prime_pin(struct drm_gem_object *obj)
{
	if (!drm_gem_is_imported(obj)) {
		struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);

		mutex_lock(&etnaviv_obj->lock);
		etnaviv_gem_get_pages(etnaviv_obj);
		mutex_unlock(&etnaviv_obj->lock);
	}

	return 0;
}
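
/* Unpin callback: drop the page references taken in etnaviv_gem_prime_pin(). */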
void etnaviv_gem_prime_unpin(struct drm_gem_object *obj)
{
	if (!drm_gem_is_imported(obj)) {
		struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);

		mutex_lock(&etnaviv_obj->lock);
		etnaviv_gem_put_pages(to_etnaviv_bo(obj));
		mutex_unlock(&etnaviv_obj->lock);
	}
}
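
/*
 * Release an imported buffer: drop any vmap obtained through the exporter's
 * dma-buf, free the local page-pointer array (the pages themselves belong to
 * the exporter) and detach from the dma-buf.
 */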
static void etnaviv_gem_prime_release(struct etnaviv_gem_object *etnaviv_obj)
{
	struct iosys_map map = IOSYS_MAP_INIT_VADDR(etnaviv_obj->vaddr);

	if (etnaviv_obj->vaddr)
		dma_buf_vunmap_unlocked(etnaviv_obj->base.import_attach->dmabuf, &map);

	/* Don't drop the pages for imported dmabuf, as they are not
	 * ours, just free the array we allocated:
	 */
	kvfree(etnaviv_obj->pages);

	drm_prime_gem_destroy(&etnaviv_obj->base, etnaviv_obj->sgt);
}
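
/* vmap implementation for imported buffers: map through the exporter's dma-buf. */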
static void *etnaviv_gem_prime_vmap_impl(struct etnaviv_gem_object *etnaviv_obj)
{
	struct iosys_map map;
	int ret;

	lockdep_assert_held(&etnaviv_obj->lock);

	ret = dma_buf_vmap(etnaviv_obj->base.import_attach->dmabuf, &map);
	if (ret)
		return NULL;

	return map.vaddr;
}
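
/*
 * mmap implementation for imported buffers: forward the mapping request to
 * the exporter via dma_buf_mmap().
 */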
static int etnaviv_gem_prime_mmap_obj(struct etnaviv_gem_object *etnaviv_obj,
		struct vm_area_struct *vma)
{
	int ret;

	ret = dma_buf_mmap(etnaviv_obj->base.dma_buf, vma, 0);
	if (!ret) {
		/* Drop the reference acquired by drm_gem_mmap_obj(). */
		drm_gem_object_put(&etnaviv_obj->base);
	}

	return ret;
}
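
/* GEM object ops used for buffers imported from another driver via PRIME. */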
static const struct etnaviv_gem_ops etnaviv_gem_prime_ops = {
	/* .get_pages should never be called */
	.release = etnaviv_gem_prime_release,
	.vmap = etnaviv_gem_prime_vmap_impl,
	.mmap = etnaviv_gem_prime_mmap_obj,
};
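
/*
 * Import path: wrap an imported dma-buf's sg_table in a new etnaviv GEM
 * object and build a page array from it.
 */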
struct drm_gem_object *etnaviv_gem_prime_import_sg_table(struct drm_device *dev,
	struct dma_buf_attachment *attach, struct sg_table *sgt)
{
	struct etnaviv_gem_object *etnaviv_obj;
	size_t size = PAGE_ALIGN(attach->dmabuf->size);
	int ret, npages;

	ret = etnaviv_gem_new_private(dev, size, ETNA_BO_WC,
				      &etnaviv_gem_prime_ops, &etnaviv_obj);
	if (ret < 0)
		return ERR_PTR(ret);

	lockdep_set_class(&etnaviv_obj->lock, &etnaviv_prime_lock_class);

	npages = size / PAGE_SIZE;

	etnaviv_obj->sgt = sgt;
	etnaviv_obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);
	if (!etnaviv_obj->pages) {
		ret = -ENOMEM;
		goto fail;
	}

	ret = drm_prime_sg_to_page_array(sgt, etnaviv_obj->pages, npages);
	if (ret)
		goto fail;

	etnaviv_gem_obj_add(dev, &etnaviv_obj->base);

	return &etnaviv_obj->base;

fail:
	drm_gem_object_put(&etnaviv_obj->base);

	return ERR_PTR(ret);
}