@@ -28,148 +28,6 @@
 /******************************************************************************
  * instmem object base implementation
  *****************************************************************************/
-#define nvkm_instobj(p) container_of((p), struct nvkm_instobj, memory)
-
-static enum nvkm_memory_target
-nvkm_instobj_target(struct nvkm_memory *memory)
-{
-	memory = nvkm_instobj(memory)->parent;
-	return nvkm_memory_target(memory);
-}
-
-static u64
-nvkm_instobj_addr(struct nvkm_memory *memory)
-{
-	memory = nvkm_instobj(memory)->parent;
-	return nvkm_memory_addr(memory);
-}
-
-static u64
-nvkm_instobj_size(struct nvkm_memory *memory)
-{
-	memory = nvkm_instobj(memory)->parent;
-	return nvkm_memory_size(memory);
-}
-
-static void
-nvkm_instobj_release(struct nvkm_memory *memory)
-{
-	struct nvkm_instobj *iobj = nvkm_instobj(memory);
-	nvkm_bar_flush(iobj->imem->subdev.device->bar);
-}
-
-static void __iomem *
-nvkm_instobj_acquire(struct nvkm_memory *memory)
-{
-	return nvkm_instobj(memory)->map;
-}
-
-static u32
-nvkm_instobj_rd32(struct nvkm_memory *memory, u64 offset)
-{
-	return ioread32_native(nvkm_instobj(memory)->map + offset);
-}
-
-static void
-nvkm_instobj_wr32(struct nvkm_memory *memory, u64 offset, u32 data)
-{
-	iowrite32_native(data, nvkm_instobj(memory)->map + offset);
-}
-
-static void
-nvkm_instobj_map(struct nvkm_memory *memory, struct nvkm_vma *vma, u64 offset)
-{
-	memory = nvkm_instobj(memory)->parent;
-	nvkm_memory_map(memory, vma, offset);
-}
-
-static void *
-nvkm_instobj_dtor_old(struct nvkm_memory *memory)
-{
-	struct nvkm_instobj *iobj = nvkm_instobj(memory);
-	spin_lock(&iobj->imem->lock);
-	list_del(&iobj->head);
-	spin_unlock(&iobj->imem->lock);
-	nvkm_memory_del(&iobj->parent);
-	return iobj;
-}
-
-static const struct nvkm_memory_func
-nvkm_instobj_func = {
-	.dtor = nvkm_instobj_dtor_old,
-	.target = nvkm_instobj_target,
-	.addr = nvkm_instobj_addr,
-	.size = nvkm_instobj_size,
-	.acquire = nvkm_instobj_acquire,
-	.release = nvkm_instobj_release,
-	.map = nvkm_instobj_map,
-};
-
-static const struct nvkm_memory_ptrs
-nvkm_instobj_ptrs = {
-	.rd32 = nvkm_instobj_rd32,
-	.wr32 = nvkm_instobj_wr32,
-};
-
-static void
-nvkm_instobj_boot(struct nvkm_memory *memory, struct nvkm_vm *vm)
-{
-	memory = nvkm_instobj(memory)->parent;
-	nvkm_memory_boot(memory, vm);
-}
-
-static void
-nvkm_instobj_release_slow(struct nvkm_memory *memory)
-{
-	struct nvkm_instobj *iobj = nvkm_instobj(memory);
-	nvkm_instobj_release(memory);
-	nvkm_done(iobj->parent);
-}
-
-static void __iomem *
-nvkm_instobj_acquire_slow(struct nvkm_memory *memory)
-{
-	struct nvkm_instobj *iobj = nvkm_instobj(memory);
-	iobj->map = nvkm_kmap(iobj->parent);
-	if (iobj->map) {
-		memory->func = &nvkm_instobj_func;
-		memory->ptrs = &nvkm_instobj_ptrs;
-	}
-	return iobj->map;
-}
-
-static u32
-nvkm_instobj_rd32_slow(struct nvkm_memory *memory, u64 offset)
-{
-	struct nvkm_instobj *iobj = nvkm_instobj(memory);
-	return nvkm_ro32(iobj->parent, offset);
-}
-
-static void
-nvkm_instobj_wr32_slow(struct nvkm_memory *memory, u64 offset, u32 data)
-{
-	struct nvkm_instobj *iobj = nvkm_instobj(memory);
-	return nvkm_wo32(iobj->parent, offset, data);
-}
-
-static const struct nvkm_memory_func
-nvkm_instobj_func_slow = {
-	.dtor = nvkm_instobj_dtor_old,
-	.target = nvkm_instobj_target,
-	.addr = nvkm_instobj_addr,
-	.size = nvkm_instobj_size,
-	.boot = nvkm_instobj_boot,
-	.acquire = nvkm_instobj_acquire_slow,
-	.release = nvkm_instobj_release_slow,
-	.map = nvkm_instobj_map,
-};
-
-static const struct nvkm_memory_ptrs
-nvkm_instobj_ptrs_slow = {
-	.rd32 = nvkm_instobj_rd32_slow,
-	.wr32 = nvkm_instobj_wr32_slow,
-};
-
 void
 nvkm_instobj_dtor(struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
 {
@@ -183,7 +41,6 @@ nvkm_instobj_ctor(const struct nvkm_memory_func *func,
 		  struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
 {
 	nvkm_memory_ctor(func, &iobj->memory);
-	iobj->parent = &iobj->memory;
 	iobj->suspend = NULL;
 	spin_lock(&imem->lock);
 	list_add_tail(&iobj->head, &imem->list);
@@ -196,7 +53,6 @@ nvkm_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero,
 {
 	struct nvkm_subdev *subdev = &imem->subdev;
 	struct nvkm_memory *memory = NULL;
-	struct nvkm_instobj *iobj;
 	u32 offset;
 	int ret;
 
@@ -209,22 +65,6 @@ nvkm_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero,
 	nvkm_trace(subdev, "new %08x %08x %d: %010llx %010llx\n", size, align,
 		   zero, nvkm_memory_addr(memory), nvkm_memory_size(memory));
 
-	if (!imem->func->persistent) {
-		if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL))) {
-			ret = -ENOMEM;
-			goto done;
-		}
-
-		nvkm_memory_ctor(&nvkm_instobj_func_slow, &iobj->memory);
-		iobj->memory.ptrs = &nvkm_instobj_ptrs_slow;
-		iobj->parent = memory;
-		iobj->imem = imem;
-		spin_lock(&iobj->imem->lock);
-		list_add_tail(&iobj->head, &imem->list);
-		spin_unlock(&iobj->imem->lock);
-		memory = &iobj->memory;
-	}
-
 	if (!imem->func->zero && zero) {
 		void __iomem *map = nvkm_kmap(memory);
 		if (unlikely(!map)) {
@@ -271,7 +111,7 @@ nvkm_instmem_fini(struct nvkm_subdev *subdev, bool suspend)
 
 	if (suspend) {
 		list_for_each_entry(iobj, &imem->list, head) {
-			struct nvkm_memory *memory = iobj->parent;
+			struct nvkm_memory *memory = &iobj->memory;
 			u64 size = nvkm_memory_size(memory);
 
 			iobj->suspend = vmalloc(size);
@@ -304,7 +144,7 @@ nvkm_instmem_init(struct nvkm_subdev *subdev)
 
 	list_for_each_entry(iobj, &imem->list, head) {
 		if (iobj->suspend) {
-			struct nvkm_memory *memory = iobj->parent;
+			struct nvkm_memory *memory = &iobj->memory;
 			u64 size = nvkm_memory_size(memory);
 			for (i = 0; i < size; i += 4)
 				nvkm_wo32(memory, i, iobj->suspend[i / 4]);