drivers/gpu/drm/i915/display/intel_dpt.c:264: if (IS_ERR(dpt_obj) && i915_ggtt_has_aperture(to_gt(i915)->ggtt))
drivers/gpu/drm/i915/display/intel_dpt.c:307: vm->pte_encode = vm->gt->ggtt->vm.pte_encode;
drivers/gpu/drm/i915/display/intel_fbdev_fb.c:76: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/display/intel_fbdev_fb.c:80: (unsigned long)(ggtt->gmadr.start + i915_ggtt_offset(vma));
drivers/gpu/drm/i915/gem/i915_gem_context.h:176: vm = &to_gt(ctx->i915)->ggtt->vm;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1135: return to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1167: struct i915_ggtt *ggtt = cache_to_ggtt(cache);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1175: io_mapping_map_atomic_wc(&ggtt->iomap, offset);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1196: struct i915_ggtt *ggtt = cache_to_ggtt(cache);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1198: intel_gt_flush_ggtt_writes(ggtt->vm.gt);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1202: ggtt->vm.clear_range(&ggtt->vm,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1205: mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1207: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1260: struct i915_ggtt *ggtt = cache_to_ggtt(cache);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1265: intel_gt_flush_ggtt_writes(ggtt->vm.gt);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1301: mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1303: (&ggtt->vm.mm, &cache->node,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1305: 0, ggtt->mappable_end,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1307: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1318: ggtt->vm.insert_page(&ggtt->vm,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1321: i915_gem_get_pat_index(ggtt->vm.i915,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:1328: vaddr = (void __force *)io_mapping_map_atomic_wc(&ggtt->iomap,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c:2326: &eb->gt->ggtt->vm,
drivers/gpu/drm/i915/gem/i915_gem_mman.c:1122: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gem/i915_gem_mman.c:1136: mmap_type = i915_ggtt_has_aperture(ggtt) ? I915_MMAP_TYPE_GTT : I915_MMAP_TYPE_WC;
drivers/gpu/drm/i915/gem/i915_gem_mman.c:347: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gem/i915_gem_mman.c:383: ret = intel_gt_reset_lock_interruptible(ggtt->vm.gt, &srcu);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:420: ret = mutex_lock_interruptible(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:422: ret = i915_gem_evict_vm(&ggtt->vm, &ww, NULL);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:423: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:459: set_address_limits(area, vma, obj_offset, ggtt->gmadr.start,
drivers/gpu/drm/i915/gem/i915_gem_mman.c:463: ret = remap_io_mapping(area, start, pfn, end - start, &ggtt->iomap);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:470: mutex_lock(&to_gt(i915)->ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:472: list_add(&obj->userfault_link, &to_gt(i915)->ggtt->userfault_list);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:473: mutex_unlock(&to_gt(i915)->ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:493: intel_gt_reset_unlock(ggtt->vm.gt, srcu);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:594: mutex_lock(&to_gt(i915)->ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:612: mutex_unlock(&to_gt(i915)->ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_mman.c:841: else if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))
drivers/gpu/drm/i915/gem/i915_gem_mman.c:889: if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))
drivers/gpu/drm/i915/gem/i915_gem_region.c:133: !i915_ggtt_has_aperture(to_gt(mem->i915)->ggtt))
drivers/gpu/drm/i915/gem/i915_gem_shrinker.c:406: mutex_lock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_shrinker.c:408: &gt->ggtt->vm.bound_list, vm_link) {
drivers/gpu/drm/i915/gem/i915_gem_shrinker.c:423: mutex_unlock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:113: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:114: struct intel_uncore *uncore = ggtt->vm.gt->uncore;
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:138: ggtt_res = DEFINE_RES_MEM(ggtt_start, ggtt_total_entries(ggtt) * 4);
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:597: static void dbg_poison(struct i915_ggtt *ggtt,
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:602: if (!drm_mm_node_allocated(&ggtt->error_capture))
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:605: if (ggtt->vm.bind_async_flags & I915_VMA_GLOBAL_BIND)
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:610: mutex_lock(&ggtt->error_mutex);
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:614: ggtt->vm.insert_page(&ggtt->vm, addr,
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:615: ggtt->error_capture.start,
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:616: i915_gem_get_pat_index(ggtt->vm.i915,
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:621: s = io_mapping_map_wc(&ggtt->iomap,
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:622: ggtt->error_capture.start,
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:631: ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE);
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:632: mutex_unlock(&ggtt->error_mutex);
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:680: dbg_poison(to_gt(i915)->ggtt,
drivers/gpu/drm/i915/gem/i915_gem_stolen.c:696: dbg_poison(to_gt(i915)->ggtt,
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:187: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:195: mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:199: GEM_BUG_ON(vma->vm != &ggtt->vm);
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:212: list_splice(&unbind, &ggtt->vm.bound_list);
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:217: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:226: return to_gt(i915)->ggtt->bit_6_swizzle_x == I915_BIT_6_SWIZZLE_9_10_17 &&
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:351: if (!to_gt(i915)->ggtt->num_fences)
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:377: args->swizzle_mode = to_gt(i915)->ggtt->bit_6_swizzle_x;
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:379: args->swizzle_mode = to_gt(i915)->ggtt->bit_6_swizzle_y;
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:434: if (!to_gt(i915)->ggtt->num_fences)
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:450: args->swizzle_mode = to_gt(i915)->ggtt->bit_6_swizzle_x;
drivers/gpu/drm/i915/gem/i915_gem_tiling.c:453: args->swizzle_mode = to_gt(i915)->ggtt->bit_6_swizzle_y;
drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c:371: swizzle = gt->ggtt->bit_6_swizzle_x;
drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c:387: swizzle = gt->ggtt->bit_6_swizzle_y;
drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c:693: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c:698: if (has_bit17_swizzle(ggtt->bit_6_swizzle_x) ||
drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c:699: has_bit17_swizzle(ggtt->bit_6_swizzle_y))
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c:264: return gt->ggtt->num_fences;
drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c:1404: vm = ctx->vm ?: &to_gt(i915)->ggtt->alias->vm;
drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c:1656: vm = i915_vm_get(&engine->gt->ggtt->vm);
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:326: if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:339: (1 + next_prime_number(to_gt(i915)->ggtt->vm.total >> PAGE_SHIFT)) << PAGE_SHIFT);
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:385: tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_x;
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:388: tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_y;
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:459: if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:476: (1 + next_prime_number(to_gt(i915)->ggtt->vm.total >> PAGE_SHIFT)) << PAGE_SHIFT);
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:505: tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_x;
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:508: tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_y;
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:553: vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c:884: !i915_ggtt_has_aperture(to_gt(i915)->ggtt))
drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c:46: to_gt(i915)->ggtt->vm.total + PAGE_SIZE);
drivers/gpu/drm/i915/gt/gen6_ppgtt.c:166: gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt);
drivers/gpu/drm/i915/gt/gen6_ppgtt.c:286: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/gen6_ppgtt.c:291: ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset;
drivers/gpu/drm/i915/gt/gen6_ppgtt.c:382: struct i915_ggtt * const ggtt = ppgtt->base.vm.gt->ggtt;
drivers/gpu/drm/i915/gt/gen6_ppgtt.c:402: ppgtt->vma = i915_vma_instance(pd->pt.base, &ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/gen6_ppgtt.c:430: struct i915_ggtt * const ggtt = gt->ggtt;
drivers/gpu/drm/i915/gt/gen6_ppgtt.c:452: ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode;
drivers/gpu/drm/i915/gt/intel_engine_cs.c:1046: if (!HAS_LLC(engine->i915) && i915_ggtt_has_aperture(engine->gt->ggtt))
drivers/gpu/drm/i915/gt/intel_engine_cs.c:1090: vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1001: ppgtt = i915_ppgtt_create(ggtt->vm.gt, 0);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1005: if (GEM_WARN_ON(ppgtt->vm.total < ggtt->vm.total)) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:101: ret = ggtt_init_hw(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1010: err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, ggtt->vm.total);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1026: ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, ggtt->vm.total);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1028: ggtt->alias = ppgtt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1029: ggtt->vm.bind_async_flags |= ppgtt->vm.bind_async_flags;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1031: GEM_BUG_ON(ggtt->vm.vma_ops.bind_vma != intel_ggtt_bind_vma);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1032: ggtt->vm.vma_ops.bind_vma = aliasing_gtt_bind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1034: GEM_BUG_ON(ggtt->vm.vma_ops.unbind_vma != intel_ggtt_unbind_vma);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1035: ggtt->vm.vma_ops.unbind_vma = aliasing_gtt_unbind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1047: static void fini_aliasing_ppgtt(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1051: ppgtt = fetch_and_zero(&ggtt->alias);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1057: ggtt->vm.vma_ops.bind_vma = intel_ggtt_bind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1058: ggtt->vm.vma_ops.unbind_vma = intel_ggtt_unbind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1065: ret = init_ggtt(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1070: ret = init_aliasing_ppgtt(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1072: cleanup_init_ggtt(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1078: static void ggtt_cleanup_hw(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1082: flush_workqueue(ggtt->vm.i915->wq);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1083: i915_gem_drain_freed_objects(ggtt->vm.i915);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1085: mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1087: ggtt->vm.skip_pte_rewrite = true;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1089: list_for_each_entry_safe(vma, vn, &ggtt->vm.bound_list, vm_link) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:1101: if (drm_mm_node_allocated(&ggtt->error_capture))
drivers/gpu/drm/i915/gt/intel_ggtt.c:1102: drm_mm_remove_node(&ggtt->error_capture);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1103: mutex_destroy(&ggtt->error_mutex);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1105: ggtt_release_guc_top(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1106: intel_vgt_deballoon(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1108: ggtt->vm.cleanup(&ggtt->vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1110: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1111: i915_address_space_fini(&ggtt->vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1113: arch_phys_wc_del(ggtt->mtrr);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1115: if (ggtt->iomap.size)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1116: io_mapping_fini(&ggtt->iomap);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1125: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1127: fini_aliasing_ppgtt(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1129: intel_ggtt_fini_fences(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1130: ggtt_cleanup_hw(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1140: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1142: GEM_WARN_ON(kref_read(&ggtt->vm.resv_ref) != 1);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1143: dma_resv_fini(&ggtt->vm._resv);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1195: static int ggtt_probe_common(struct i915_ggtt *ggtt, u64 size)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1197: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1198: struct intel_uncore *uncore = ggtt->vm.gt->uncore;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1214: ggtt->gsm = ioremap_wc(phys_addr, size);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1216: ggtt->gsm = ioremap(phys_addr, size);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1218: if (!ggtt->gsm) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:1223: kref_init(&ggtt->vm.resv_ref);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1224: ret = setup_scratch_page(&ggtt->vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1228: iounmap(ggtt->gsm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1233: if (i915_gem_object_is_lmem(ggtt->vm.scratch[0]))
drivers/gpu/drm/i915/gt/intel_ggtt.c:1236: ggtt->vm.scratch[0]->encode =
drivers/gpu/drm/i915/gt/intel_ggtt.c:1237: ggtt->vm.pte_encode(px_dma(ggtt->vm.scratch[0]),
drivers/gpu/drm/i915/gt/intel_ggtt.c:1247: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1249: iounmap(ggtt->gsm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1259: static int gen8_gmch_probe(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1261: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1270: ggtt->gmadr = pci_resource(pdev, GEN4_GMADR_BAR);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1271: ggtt->mappable_end = resource_size(&ggtt->gmadr);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1280: ggtt->vm.alloc_pt_dma = alloc_pt_dma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1281: ggtt->vm.alloc_scratch_dma = alloc_pt_dma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1282: ggtt->vm.lmem_pt_obj_flags = I915_BO_ALLOC_PM_EARLY;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1284: ggtt->vm.total = (size / sizeof(gen8_pte_t)) * I915_GTT_PAGE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1285: ggtt->vm.cleanup = gen6_gmch_remove;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1286: ggtt->vm.insert_page = gen8_ggtt_insert_page;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1287: ggtt->vm.clear_range = nop_clear_range;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1288: ggtt->vm.scratch_range = gen8_ggtt_clear_range;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1290: ggtt->vm.insert_entries = gen8_ggtt_insert_entries;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1291: ggtt->vm.read_entry = gen8_ggtt_read_entry;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1298: ggtt->vm.insert_entries = bxt_vtd_ggtt_insert_entries__BKL;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1299: ggtt->vm.insert_page = bxt_vtd_ggtt_insert_page__BKL;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1307: ggtt->vm.raw_insert_page = gen8_ggtt_insert_page;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1308: ggtt->vm.raw_insert_entries = gen8_ggtt_insert_entries;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1310: ggtt->vm.bind_async_flags =
drivers/gpu/drm/i915/gt/intel_ggtt.c:1315: ggtt->vm.scratch_range = gen8_ggtt_scratch_range_bind;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1316: ggtt->vm.insert_page = gen8_ggtt_insert_page_bind;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1317: ggtt->vm.insert_entries = gen8_ggtt_insert_entries_bind;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1322: ggtt->vm.raw_insert_page = gen8_ggtt_insert_page;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1325: if (intel_uc_wants_guc_submission(&ggtt->vm.gt->uc))
drivers/gpu/drm/i915/gt/intel_ggtt.c:1326: ggtt->invalidate = guc_ggtt_invalidate;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1328: ggtt->invalidate = gen8_ggtt_invalidate;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1330: ggtt->vm.vma_ops.bind_vma = intel_ggtt_bind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1331: ggtt->vm.vma_ops.unbind_vma = intel_ggtt_unbind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1334: ggtt->vm.pte_encode = mtl_ggtt_pte_encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1336: ggtt->vm.pte_encode = gen8_ggtt_pte_encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1338: ggtt->vm.pte_decode = gen8_ggtt_pte_decode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1340: return ggtt_probe_common(ggtt, size);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1447: static int gen6_gmch_probe(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1449: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1457: ggtt->gmadr = pci_resource(pdev, GEN4_GMADR_BAR);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1458: ggtt->mappable_end = resource_size(&ggtt->gmadr);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1464: if (ggtt->mappable_end < (64 << 20) ||
drivers/gpu/drm/i915/gt/intel_ggtt.c:1465: ggtt->mappable_end > (512 << 20)) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:1467: &ggtt->mappable_end);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1474: ggtt->vm.total = (size / sizeof(gen6_pte_t)) * I915_GTT_PAGE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1476: ggtt->vm.alloc_pt_dma = alloc_pt_dma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1477: ggtt->vm.alloc_scratch_dma = alloc_pt_dma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1479: ggtt->vm.clear_range = nop_clear_range;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1481: ggtt->vm.clear_range = gen6_ggtt_clear_range;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1482: ggtt->vm.scratch_range = gen6_ggtt_clear_range;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1483: ggtt->vm.insert_page = gen6_ggtt_insert_page;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1484: ggtt->vm.insert_entries = gen6_ggtt_insert_entries;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1485: ggtt->vm.read_entry = gen6_ggtt_read_entry;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1486: ggtt->vm.cleanup = gen6_gmch_remove;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1488: ggtt->invalidate = gen6_ggtt_invalidate;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1491: ggtt->vm.pte_encode = iris_pte_encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1493: ggtt->vm.pte_encode = hsw_pte_encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1495: ggtt->vm.pte_encode = byt_pte_encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1497: ggtt->vm.pte_encode = ivb_pte_encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1499: ggtt->vm.pte_encode = snb_pte_encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1501: ggtt->vm.pte_decode = gen6_pte_decode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1503: ggtt->vm.vma_ops.bind_vma = intel_ggtt_bind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1504: ggtt->vm.vma_ops.unbind_vma = intel_ggtt_unbind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1506: return ggtt_probe_common(ggtt, size);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1509: static int ggtt_probe_hw(struct i915_ggtt *ggtt, struct intel_gt *gt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1514: ggtt->vm.gt = gt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1515: ggtt->vm.i915 = i915;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1516: ggtt->vm.dma = i915->drm.dev;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1517: dma_resv_init(&ggtt->vm._resv);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1520: ret = gen8_gmch_probe(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1522: ret = gen6_gmch_probe(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1524: ret = intel_ggtt_gmch_probe(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1527: dma_resv_fini(&ggtt->vm._resv);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1531: if ((ggtt->vm.total - 1) >> 32) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:1535: ggtt->vm.total >> 20);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1536: ggtt->vm.total = 1ULL << 32;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1537: ggtt->mappable_end =
drivers/gpu/drm/i915/gt/intel_ggtt.c:1538: min_t(u64, ggtt->mappable_end, ggtt->vm.total);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1541: if (ggtt->mappable_end > ggtt->vm.total) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:1545: &ggtt->mappable_end, ggtt->vm.total);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1546: ggtt->mappable_end = ggtt->vm.total;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1550: drm_dbg(&i915->drm, "GGTT size = %lluM\n", ggtt->vm.total >> 20);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1552: (u64)ggtt->mappable_end >> 20);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1574: ret = ggtt_probe_hw(to_gt(i915)->ggtt, to_gt(i915));
drivers/gpu/drm/i915/gt/intel_ggtt.c:1586: struct i915_ggtt *ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1588: ggtt = drmm_kzalloc(&i915->drm, sizeof(*ggtt), GFP_KERNEL);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1589: if (!ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1592: INIT_LIST_HEAD(&ggtt->gt_list);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1594: return ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:1659: void i915_ggtt_resume(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1664: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1667: flush = i915_ggtt_resume_vm(&ggtt->vm, false);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1669: if (drm_mm_node_allocated(&ggtt->error_capture))
drivers/gpu/drm/i915/gt/intel_ggtt.c:1670: ggtt->vm.scratch_range(&ggtt->vm, ggtt->error_capture.start,
drivers/gpu/drm/i915/gt/intel_ggtt.c:1671: ggtt->error_capture.size);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1673: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)
drivers/gpu/drm/i915/gt/intel_ggtt.c:1676: ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:1681: intel_ggtt_restore_fences(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:181: void i915_ggtt_suspend(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:185: i915_ggtt_suspend_vm(&ggtt->vm, false);
drivers/gpu/drm/i915/gt/intel_ggtt.c:186: ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:188: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)
drivers/gpu/drm/i915/gt/intel_ggtt.c:192: void gen6_ggtt_invalidate(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:194: struct intel_uncore *uncore = ggtt->vm.gt->uncore;
drivers/gpu/drm/i915/gt/intel_ggtt.c:217: static void gen8_ggtt_invalidate(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:219: struct intel_uncore *uncore = ggtt->vm.gt->uncore;
drivers/gpu/drm/i915/gt/intel_ggtt.c:227: if (needs_wc_ggtt_mapping(ggtt->vm.i915))
drivers/gpu/drm/i915/gt/intel_ggtt.c:241: static void guc_ggtt_invalidate(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:243: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt.c:246: gen8_ggtt_invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:248: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:301: static bool should_update_ggtt_with_bind(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:303: struct intel_gt *gt = ggtt->vm.gt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:308: static struct intel_context *gen8_ggtt_bind_get_ce(struct i915_ggtt *ggtt, intel_wakeref_t *wakeref)
drivers/gpu/drm/i915/gt/intel_ggtt.c:311: struct intel_gt *gt = ggtt->vm.gt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:340: static bool gen8_ggtt_bind_ptes(struct i915_ggtt *ggtt, u32 offset,
drivers/gpu/drm/i915/gt/intel_ggtt.c:345: struct intel_gt *gt = ggtt->vm.gt;
drivers/gpu/drm/i915/gt/intel_ggtt.c:346: const gen8_pte_t scratch_pte = ggtt->vm.scratch[0]->encode;
drivers/gpu/drm/i915/gt/intel_ggtt.c:356: ce = gen8_ggtt_bind_get_ce(ggtt, &wakeref);
drivers/gpu/drm/i915/gt/intel_ggtt.c:458: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:460: (gen8_pte_t __iomem *)ggtt->gsm + offset / I915_GTT_PAGE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:462: gen8_set_pte(pte, ggtt->vm.pte_encode(addr, pat_index, flags));
drivers/gpu/drm/i915/gt/intel_ggtt.c:464: ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:470: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:472: (gen8_pte_t __iomem *)ggtt->gsm + offset / I915_GTT_PAGE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:474: return ggtt->vm.pte_decode(gen8_get_pte(pte), is_present, is_local);
drivers/gpu/drm/i915/gt/intel_ggtt.c:481: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:484: pte = ggtt->vm.pte_encode(addr, pat_index, flags);
drivers/gpu/drm/i915/gt/intel_ggtt.c:486: gen8_ggtt_bind_ptes(ggtt, offset, NULL, 1, pte))
drivers/gpu/drm/i915/gt/intel_ggtt.c:487: return ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:497: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:498: const gen8_pte_t pte_encode = ggtt->vm.pte_encode(0, pat_index, flags);
drivers/gpu/drm/i915/gt/intel_ggtt.c:509: gte = (gen8_pte_t __iomem *)ggtt->gsm;
drivers/gpu/drm/i915/gt/intel_ggtt.c:528: ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:535: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:540: pte_encode = ggtt->vm.pte_encode(0, pat_index, flags);
drivers/gpu/drm/i915/gt/intel_ggtt.c:543: if (!gen8_ggtt_bind_ptes(ggtt, start, NULL, end - start, scratch_pte))
drivers/gpu/drm/i915/gt/intel_ggtt.c:548: if (!gen8_ggtt_bind_ptes(ggtt, start, vma_res->bi.pages,
drivers/gpu/drm/i915/gt/intel_ggtt.c:553: if (!gen8_ggtt_bind_ptes(ggtt, start, NULL, end - start, scratch_pte))
drivers/gpu/drm/i915/gt/intel_ggtt.c:56: static int ggtt_init_hw(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:566: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:570: return ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:578: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:58: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt.c:583: (gen8_pte_t __iomem *)ggtt->gsm + first_entry;
drivers/gpu/drm/i915/gt/intel_ggtt.c:584: const int max_entries = ggtt_total_entries(ggtt) - first_entry;
drivers/gpu/drm/i915/gt/intel_ggtt.c:599: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:60: i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT);
drivers/gpu/drm/i915/gt/intel_ggtt.c:603: const int max_entries = ggtt_total_entries(ggtt) - first_entry;
drivers/gpu/drm/i915/gt/intel_ggtt.c:610: if (should_update_ggtt_with_bind(ggtt) && gen8_ggtt_bind_ptes(ggtt, first_entry,
drivers/gpu/drm/i915/gt/intel_ggtt.c:612: return ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:62: ggtt->vm.is_ggtt = true;
drivers/gpu/drm/i915/gt/intel_ggtt.c:623: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:625: (gen6_pte_t __iomem *)ggtt->gsm + offset / I915_GTT_PAGE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:629: ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:636: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:638: (gen6_pte_t __iomem *)ggtt->gsm + offset / I915_GTT_PAGE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:65: ggtt->vm.has_read_only = IS_VALLEYVIEW(i915);
drivers/gpu/drm/i915/gt/intel_ggtt.c:654: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:660: gte = (gen6_pte_t __iomem *)ggtt->gsm;
drivers/gpu/drm/i915/gt/intel_ggtt.c:679: ggtt->invalidate(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:68: ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust;
drivers/gpu/drm/i915/gt/intel_ggtt.c:70: if (ggtt->mappable_end) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:71: if (!io_mapping_init_wc(&ggtt->iomap,
drivers/gpu/drm/i915/gt/intel_ggtt.c:72: ggtt->gmadr.start,
drivers/gpu/drm/i915/gt/intel_ggtt.c:73: ggtt->mappable_end)) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:74: ggtt->vm.cleanup(&ggtt->vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:759: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:763: (gen6_pte_t __iomem *)ggtt->gsm + first_entry;
drivers/gpu/drm/i915/gt/intel_ggtt.c:764: const int max_entries = ggtt_total_entries(ggtt) - first_entry;
drivers/gpu/drm/i915/gt/intel_ggtt.c:78: ggtt->mtrr = arch_phys_wc_add(ggtt->gmadr.start,
drivers/gpu/drm/i915/gt/intel_ggtt.c:79: ggtt->mappable_end);
drivers/gpu/drm/i915/gt/intel_ggtt.c:810: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/gt/intel_ggtt.c:812: return ggtt->vm.read_entry(vm, offset, is_present, is_local);
drivers/gpu/drm/i915/gt/intel_ggtt.c:82: intel_ggtt_init_fences(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:826: static int ggtt_reserve_guc_top(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:831: if (!intel_uc_uses_guc(&ggtt->vm.gt->uc))
drivers/gpu/drm/i915/gt/intel_ggtt.c:834: GEM_BUG_ON(ggtt->vm.total <= GUC_TOP_RESERVE_SIZE);
drivers/gpu/drm/i915/gt/intel_ggtt.c:835: offset = ggtt->vm.total - GUC_TOP_RESERVE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:837: ret = i915_gem_gtt_reserve(&ggtt->vm, NULL, &ggtt->uc_fw,
drivers/gpu/drm/i915/gt/intel_ggtt.c:841: drm_dbg(&ggtt->vm.i915->drm,
drivers/gpu/drm/i915/gt/intel_ggtt.c:847: static void ggtt_release_guc_top(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:849: if (drm_mm_node_allocated(&ggtt->uc_fw))
drivers/gpu/drm/i915/gt/intel_ggtt.c:850: drm_mm_remove_node(&ggtt->uc_fw);
drivers/gpu/drm/i915/gt/intel_ggtt.c:853: static void cleanup_init_ggtt(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:855: ggtt_release_guc_top(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:856: if (drm_mm_node_allocated(&ggtt->error_capture))
drivers/gpu/drm/i915/gt/intel_ggtt.c:857: drm_mm_remove_node(&ggtt->error_capture);
drivers/gpu/drm/i915/gt/intel_ggtt.c:858: mutex_destroy(&ggtt->error_mutex);
drivers/gpu/drm/i915/gt/intel_ggtt.c:861: static int init_ggtt(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt.c:883: ggtt->pin_bias = max_t(u32, I915_GTT_PAGE_SIZE,
drivers/gpu/drm/i915/gt/intel_ggtt.c:884: intel_wopcm_guc_size(&ggtt->vm.gt->wopcm));
drivers/gpu/drm/i915/gt/intel_ggtt.c:886: ret = intel_vgt_balloon(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:890: mutex_init(&ggtt->error_mutex);
drivers/gpu/drm/i915/gt/intel_ggtt.c:891: if (ggtt->mappable_end) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:914: ggtt->error_capture.size = 2 * I915_GTT_PAGE_SIZE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:915: ggtt->error_capture.color = I915_COLOR_UNEVICTABLE;
drivers/gpu/drm/i915/gt/intel_ggtt.c:916: if (drm_mm_reserve_node(&ggtt->vm.mm, &ggtt->error_capture))
drivers/gpu/drm/i915/gt/intel_ggtt.c:917: drm_mm_insert_node_in_range(&ggtt->vm.mm,
drivers/gpu/drm/i915/gt/intel_ggtt.c:918: &ggtt->error_capture,
drivers/gpu/drm/i915/gt/intel_ggtt.c:919: ggtt->error_capture.size, 0,
drivers/gpu/drm/i915/gt/intel_ggtt.c:920: ggtt->error_capture.color,
drivers/gpu/drm/i915/gt/intel_ggtt.c:921: 0, ggtt->mappable_end,
drivers/gpu/drm/i915/gt/intel_ggtt.c:924: if (drm_mm_node_allocated(&ggtt->error_capture)) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:925: u64 start = ggtt->error_capture.start;
drivers/gpu/drm/i915/gt/intel_ggtt.c:926: u64 size = ggtt->error_capture.size;
drivers/gpu/drm/i915/gt/intel_ggtt.c:928: ggtt->vm.scratch_range(&ggtt->vm, start, size);
drivers/gpu/drm/i915/gt/intel_ggtt.c:929: drm_dbg(&ggtt->vm.i915->drm,
drivers/gpu/drm/i915/gt/intel_ggtt.c:939: ret = ggtt_reserve_guc_top(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:944: drm_mm_for_each_hole(entry, &ggtt->vm.mm, hole_start, hole_end) {
drivers/gpu/drm/i915/gt/intel_ggtt.c:945: drm_dbg(&ggtt->vm.i915->drm,
drivers/gpu/drm/i915/gt/intel_ggtt.c:948: ggtt->vm.clear_range(&ggtt->vm, hole_start,
drivers/gpu/drm/i915/gt/intel_ggtt.c:953: ggtt->vm.clear_range(&ggtt->vm, ggtt->vm.total - PAGE_SIZE, PAGE_SIZE);
drivers/gpu/drm/i915/gt/intel_ggtt.c:958: cleanup_init_ggtt(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt.c:995: static int init_aliasing_ppgtt(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:205: struct i915_ggtt *ggtt = fence->ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:255: list_move(&fence->link, &ggtt->fence_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:279: list_move_tail(&fence->link, &ggtt->fence_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:331: static struct i915_fence_reg *fence_find(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:333: struct intel_display *display = ggtt->vm.i915->display;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:337: list_for_each_entry_safe(fence, fn, &ggtt->fence_list, link) {
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:348: list_move_tail(&fence->link, &ggtt->fence_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:367: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:380: list_move_tail(&fence->link, &ggtt->fence_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:384: fence = fence_find(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:457: struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:463: lockdep_assert_held(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:467: list_for_each_entry(fence, &ggtt->fence_list, link)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:472: fence = fence_find(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:496: struct i915_ggtt *ggtt = fence->ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:498: lockdep_assert_held(&ggtt->vm.mutex);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:500: list_add(&fence->link, &ggtt->fence_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:511: void intel_ggtt_restore_fences(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:515: for (i = 0; i < ggtt->num_fences; i++)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:516: fence_write(&ggtt->fence_regs[i]);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:53: return fence->ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:574: static void detect_bit_6_swizzle(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:576: struct intel_uncore *uncore = ggtt->vm.gt->uncore;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:577: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:58: return fence->ggtt->vm.gt->uncore;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:740: to_gt(i915)->ggtt->bit_6_swizzle_x = swizzle_x;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:741: to_gt(i915)->ggtt->bit_6_swizzle_y = swizzle_y;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:841: void intel_ggtt_init_fences(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:843: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:844: struct intel_uncore *uncore = ggtt->vm.gt->uncore;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:848: INIT_LIST_HEAD(&ggtt->fence_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:849: INIT_LIST_HEAD(&ggtt->userfault_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:851: detect_bit_6_swizzle(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:853: if (!i915_ggtt_has_aperture(ggtt))
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:868: ggtt->fence_regs = kzalloc_objs(*ggtt->fence_regs, num_fences);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:869: if (!ggtt->fence_regs)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:874: struct i915_fence_reg *fence = &ggtt->fence_regs[i];
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:877: fence->ggtt = ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:879: list_add_tail(&fence->link, &ggtt->fence_list);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:881: ggtt->num_fences = num_fences;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:883: intel_ggtt_restore_fences(ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:886: void intel_ggtt_fini_fences(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:890: for (i = 0; i < ggtt->num_fences; i++) {
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:891: struct i915_fence_reg *fence = &ggtt->fence_regs[i];
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:896: kfree(ggtt->fence_regs);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c:905: to_gt(i915)->ggtt->bit_6_swizzle_x == I915_BIT_6_SWIZZLE_NONE)
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.h:24: struct i915_ggtt *ggtt;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.h:44: struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.h:47: void intel_ggtt_restore_fences(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.h:54: void intel_ggtt_init_fences(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.h:55: void intel_ggtt_fini_fences(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:101: ggtt->vm.alloc_pt_dma = alloc_pt_dma;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:102: ggtt->vm.alloc_scratch_dma = alloc_pt_dma;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:107: ggtt->do_idle_maps = true;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:110: ggtt->vm.insert_page = gmch_ggtt_insert_page;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:111: ggtt->vm.insert_entries = gmch_ggtt_insert_entries;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:112: ggtt->vm.clear_range = gmch_ggtt_clear_range;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:113: ggtt->vm.scratch_range = gmch_ggtt_clear_range;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:114: ggtt->vm.read_entry = gmch_ggtt_read_entry;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:115: ggtt->vm.cleanup = gmch_ggtt_remove;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:117: ggtt->invalidate = gmch_ggtt_invalidate;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:119: ggtt->vm.vma_ops.bind_vma = intel_ggtt_bind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:120: ggtt->vm.vma_ops.unbind_vma = intel_ggtt_unbind_vma;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:122: if (unlikely(ggtt->do_idle_maps))
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:50: static void gmch_ggtt_invalidate(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:85: int intel_ggtt_gmch_probe(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:87: struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:97: intel_gmch_gtt_get(&ggtt->vm.total, &gmadr_base, &ggtt->mappable_end);
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.c:99: ggtt->gmadr = DEFINE_RES_MEM(gmadr_base, ggtt->mappable_end);
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.h:16: int intel_ggtt_gmch_probe(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_ggtt_gmch.h:23: static inline int intel_ggtt_gmch_probe(struct i915_ggtt *ggtt) { return -ENODEV; }
drivers/gpu/drm/i915/gt/intel_gt.c:119: gt->ggtt = to_gt(gt->i915)->ggtt;
drivers/gpu/drm/i915/gt/intel_gt.c:121: gt->ggtt = i915_ggtt_create(gt->i915);
drivers/gpu/drm/i915/gt/intel_gt.c:122: if (IS_ERR(gt->ggtt))
drivers/gpu/drm/i915/gt/intel_gt.c:123: return PTR_ERR(gt->ggtt);
drivers/gpu/drm/i915/gt/intel_gt.c:126: list_add_tail(&gt->ggtt_link, &gt->ggtt->gt_list);
drivers/gpu/drm/i915/gt/intel_gt.c:482: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/intel_gt.c:511: return i915_vm_get(&gt->ggtt->vm);
drivers/gpu/drm/i915/gt/intel_gt_pm.c:398: intel_ggtt_restore_fences(gt->ggtt);
drivers/gpu/drm/i915/gt/intel_gt_types.h:103: struct i915_ggtt *ggtt;
drivers/gpu/drm/i915/gt/intel_gt_types.h:322: #define intel_gt_support_legacy_fencing(gt) ((gt)->ggtt->num_fences > 0)
drivers/gpu/drm/i915/gt/intel_gtt.h:378: void (*invalidate)(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_gtt.h:607: static inline bool i915_ggtt_has_aperture(const struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/gt/intel_gtt.h:609: return ggtt->mappable_end > 0;
drivers/gpu/drm/i915/gt/intel_gtt.h:620: void i915_ggtt_resume(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_gtt.h:64: #define ggtt_total_entries(ggtt) ((ggtt)->vm.total >> PAGE_SHIFT)
drivers/gpu/drm/i915/gt/intel_gtt.h:667: void gen6_ggtt_invalidate(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/gt/intel_lrc.c:1112: vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/intel_lrc.c:1817: vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/intel_renderstate.c:162: so->vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/intel_reset.c:846: for (i = 0; i < gt->ggtt->num_fences; i++) {
drivers/gpu/drm/i915/gt/intel_reset.c:851: vma = READ_ONCE(gt->ggtt->fence_regs[i].vma);
drivers/gpu/drm/i915/gt/intel_reset.c:858: GEM_BUG_ON(vma->fence != &gt->ggtt->fence_regs[i]);
drivers/gpu/drm/i915/gt/intel_reset.c:926: intel_ggtt_restore_fences(gt->ggtt);
drivers/gpu/drm/i915/gt/intel_ring.c:111: static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size)
drivers/gpu/drm/i915/gt/intel_ring.c:113: struct i915_address_space *vm = &ggtt->vm;
drivers/gpu/drm/i915/gt/intel_ring.c:120: if (IS_ERR(obj) && i915_ggtt_has_aperture(ggtt) && !HAS_LLC(i915))
drivers/gpu/drm/i915/gt/intel_ring.c:174: vma = create_ring_vma(engine->gt->ggtt, size);
drivers/gpu/drm/i915/gt/intel_ring_submission.c:591: vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/intel_timeline.c:32: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/intel_workarounds.c:3042: vma = __vm_create_scratch_for_read(&ce->engine->gt->ggtt->vm,
drivers/gpu/drm/i915/gt/mock_engine.c:38: static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size)
drivers/gpu/drm/i915/gt/mock_engine.c:40: struct i915_address_space *vm = &ggtt->vm;
drivers/gpu/drm/i915/gt/mock_engine.c:75: ring->vma = create_ring_vma(engine->gt->ggtt, PAGE_SIZE);
drivers/gpu/drm/i915/gt/selftest_execlists.c:1004: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/selftest_execlists.c:1311: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/selftest_execlists.c:1570: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/selftest_execlists.c:3134: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/selftest_execlists.c:4202: __vm_create_scratch_for_read_pinned(&siblings[0]->gt->ggtt->vm,
drivers/gpu/drm/i915/gt/selftest_hangcheck.c:1440: if (!gt->ggtt->num_fences && flags & EXEC_OBJECT_NEEDS_FENCE)
drivers/gpu/drm/i915/gt/selftest_hangcheck.c:1585: return __igt_reset_evict_vma(gt, &gt->ggtt->vm,
drivers/gpu/drm/i915/gt/selftest_hangcheck.c:1614: return __igt_reset_evict_vma(gt, &gt->ggtt->vm,
drivers/gpu/drm/i915/gt/selftest_lrc.c:36: return __vm_create_scratch_for_read_pinned(&gt->ggtt->vm, PAGE_SIZE);
drivers/gpu/drm/i915/gt/selftest_mocs.c:80: __vm_create_scratch_for_read_pinned(&gt->ggtt->vm, PAGE_SIZE);
drivers/gpu/drm/i915/gt/selftest_reset.c:111: ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE);
drivers/gpu/drm/i915/gt/selftest_reset.c:130: ggtt->vm.insert_page(&ggtt->vm, dma,
drivers/gpu/drm/i915/gt/selftest_reset.c:131: ggtt->error_capture.start,
drivers/gpu/drm/i915/gt/selftest_reset.c:137: s = io_mapping_map_wc(&ggtt->iomap,
drivers/gpu/drm/i915/gt/selftest_reset.c:138: ggtt->error_capture.start,
drivers/gpu/drm/i915/gt/selftest_reset.c:160: ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE);
drivers/gpu/drm/i915/gt/selftest_reset.c:22: struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/gt/selftest_reset.c:34: if (!drm_mm_node_allocated(&ggtt->error_capture))
drivers/gpu/drm/i915/gt/selftest_reset.c:87: ggtt->vm.insert_page(&ggtt->vm, dma,
drivers/gpu/drm/i915/gt/selftest_reset.c:88: ggtt->error_capture.start,
drivers/gpu/drm/i915/gt/selftest_reset.c:94: s = io_mapping_map_wc(&ggtt->iomap,
drivers/gpu/drm/i915/gt/selftest_reset.c:95: ggtt->error_capture.start,
drivers/gpu/drm/i915/gt/selftest_workarounds.c:125: vma = i915_vma_instance(result, &engine->gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/uc/intel_guc.c:804: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:1017: if (ggtt->vm.raw_insert_entries)
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:1018: ggtt->vm.raw_insert_entries(&ggtt->vm, vma_res,
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:1019: i915_gem_get_pat_index(ggtt->vm.i915,
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:1023: ggtt->vm.insert_entries(&ggtt->vm, vma_res,
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:1024: i915_gem_get_pat_index(ggtt->vm.i915,
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:1031: struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt;
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:1037: ggtt->vm.clear_range(&ggtt->vm, vma_res->start, vma_res->node_size);
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:969: struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:970: struct drm_mm_node *node = &ggtt->uc_fw;
drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c:997: struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt;
drivers/gpu/drm/i915/gvt/aperture_gm.c:106: mutex_lock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:108: mutex_unlock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:117: mutex_lock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:120: mutex_unlock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:183: mutex_lock(&gvt->gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:190: mutex_unlock(&gvt->gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:206: mutex_lock(&gvt->gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:209: reg = i915_reserve_fence(gvt->gt->ggtt);
drivers/gpu/drm/i915/gvt/aperture_gm.c:218: mutex_unlock(&gvt->gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:232: mutex_unlock(&gvt->gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:69: mutex_lock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/aperture_gm.c:71: ret = i915_gem_gtt_insert(&gt->ggtt->vm, NULL, node,
drivers/gpu/drm/i915/gvt/aperture_gm.c:76: mutex_unlock(&gt->ggtt->vm.mutex);
drivers/gpu/drm/i915/gvt/dmabuf.c:79: gtt_entries = (gen8_pte_t __iomem *)to_gt(dev_priv)->ggtt->gsm +
drivers/gpu/drm/i915/gvt/gtt.c:217: static u64 read_pte64(struct i915_ggtt *ggtt, unsigned long index)
drivers/gpu/drm/i915/gvt/gtt.c:219: void __iomem *addr = (gen8_pte_t __iomem *)ggtt->gsm + index;
drivers/gpu/drm/i915/gvt/gtt.c:233: static void write_pte64(struct i915_ggtt *ggtt, unsigned long index, u64 pte)
drivers/gpu/drm/i915/gvt/gtt.c:235: void __iomem *addr = (gen8_pte_t __iomem *)ggtt->gsm + index;
drivers/gpu/drm/i915/gvt/gtt.c:258: e->val64 = read_pte64(vgpu->gvt->gt->ggtt, index);
drivers/gpu/drm/i915/gvt/gtt.c:2785: write_pte64(vgpu->gvt->gt->ggtt, offset + idx, pte);
drivers/gpu/drm/i915/gvt/gtt.c:2793: write_pte64(vgpu->gvt->gt->ggtt, offset + idx, pte);
drivers/gpu/drm/i915/gvt/gtt.c:283: write_pte64(vgpu->gvt->gt->ggtt, index, e->val64);
drivers/gpu/drm/i915/gvt/gvt.h:400: #define gvt_to_ggtt(gvt) ((gvt)->gt->ggtt)
drivers/gpu/drm/i915/gvt/kvmgt.c:784: aperture_va = io_mapping_map_wc(&vgpu->gvt->gt->ggtt->iomap,
drivers/gpu/drm/i915/i915_debugfs.c:329: swizzle_string(to_gt(dev_priv)->ggtt->bit_6_swizzle_x));
drivers/gpu/drm/i915/i915_debugfs.c:331: swizzle_string(to_gt(dev_priv)->ggtt->bit_6_swizzle_y));
drivers/gpu/drm/i915/i915_driver.c:1132: i915_ggtt_suspend(to_gt(dev_priv)->ggtt);
drivers/gpu/drm/i915/i915_driver.c:1256: i915_ggtt_resume(to_gt(dev_priv)->ggtt);
drivers/gpu/drm/i915/i915_gem.c:102: pinned = ggtt->vm.reserved;
drivers/gpu/drm/i915/i915_gem.c:103: list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)
drivers/gpu/drm/i915/i915_gem.c:107: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/i915_gem.c:109: args->aper_size = ggtt->vm.total;
drivers/gpu/drm/i915/i915_gem.c:1237: i915_ggtt_resume(to_gt(dev_priv)->ggtt);
drivers/gpu/drm/i915/i915_gem.c:307: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_gem.c:335: ret = insert_mappable_node(ggtt, node, PAGE_SIZE);
drivers/gpu/drm/i915/i915_gem.c:345: ggtt->vm.clear_range(&ggtt->vm, node->start, node->size);
drivers/gpu/drm/i915/i915_gem.c:346: remove_mappable_node(ggtt, node);
drivers/gpu/drm/i915/i915_gem.c:368: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_gem.c:372: ggtt->vm.clear_range(&ggtt->vm, node->start, node->size);
drivers/gpu/drm/i915/i915_gem.c:373: remove_mappable_node(ggtt, node);
drivers/gpu/drm/i915/i915_gem.c:384: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_gem.c:420: ggtt->vm.insert_page(&ggtt->vm,
drivers/gpu/drm/i915/i915_gem.c:430: if (gtt_user_read(&ggtt->iomap, page_base, page_offset,
drivers/gpu/drm/i915/i915_gem.c:547: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_gem.c:600: intel_gt_flush_ggtt_writes(ggtt->vm.gt);
drivers/gpu/drm/i915/i915_gem.c:601: ggtt->vm.insert_page(&ggtt->vm,
drivers/gpu/drm/i915/i915_gem.c:617: if (ggtt_write(&ggtt->iomap, page_base, page_offset,
drivers/gpu/drm/i915/i915_gem.c:62: insert_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node, u32 size)
drivers/gpu/drm/i915/i915_gem.c:628: intel_gt_flush_ggtt_writes(ggtt->vm.gt);
drivers/gpu/drm/i915/i915_gem.c:66: err = mutex_lock_interruptible(&ggtt->vm.mutex);
drivers/gpu/drm/i915/i915_gem.c:71: err = drm_mm_insert_node_in_range(&ggtt->vm.mm, node,
drivers/gpu/drm/i915/i915_gem.c:73: 0, ggtt->mappable_end,
drivers/gpu/drm/i915/i915_gem.c:76: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/i915_gem.c:82: remove_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node)
drivers/gpu/drm/i915/i915_gem.c:84: mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/i915_gem.c:855: &to_gt(i915)->ggtt->userfault_list, userfault_link)
drivers/gpu/drm/i915/i915_gem.c:86: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/i915_gem.c:867: for (i = 0; i < to_gt(i915)->ggtt->num_fences; i++) {
drivers/gpu/drm/i915/i915_gem.c:868: struct i915_fence_reg *reg = &to_gt(i915)->ggtt->fence_regs[i];
drivers/gpu/drm/i915/i915_gem.c:909: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_gem.c:925: if (obj->base.size > ggtt->mappable_end)
drivers/gpu/drm/i915/i915_gem.c:94: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_gem.c:945: obj->base.size > ggtt->mappable_end / 2)
drivers/gpu/drm/i915/i915_gem.c:950: vma = i915_vma_instance(obj, &ggtt->vm, view);
drivers/gpu/drm/i915/i915_gem.c:970: (vma->fence_size > ggtt->mappable_end / 2 ||
drivers/gpu/drm/i915/i915_gem.c:99: if (mutex_lock_interruptible(&ggtt->vm.mutex))
drivers/gpu/drm/i915/i915_gem.c:991: mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/i915_gem.c:993: mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/i915_gem_evict.c:188: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/i915_gem_evict.c:190: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)
drivers/gpu/drm/i915/i915_gem_evict.c:353: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/i915_gem_evict.c:356: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)
drivers/gpu/drm/i915/i915_gem_evict.c:48: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/i915_gem_evict.c:52: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link) {
drivers/gpu/drm/i915/i915_gem_gtt.c:108: GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);
drivers/gpu/drm/i915/i915_gem_gtt.c:208: GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);
drivers/gpu/drm/i915/i915_gem_gtt.c:59: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_gem_gtt.c:62: if (unlikely(ggtt->do_idle_maps))
drivers/gpu/drm/i915/i915_getparam.c:41: value = to_gt(i915)->ggtt->num_fences;
drivers/gpu/drm/i915/i915_gpu_error.c:1147: struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/i915_gpu_error.c:1148: const u64 slot = ggtt->error_capture.start;
drivers/gpu/drm/i915/i915_gpu_error.c:1177: if (drm_mm_node_allocated(&ggtt->error_capture)) {
drivers/gpu/drm/i915/i915_gpu_error.c:1182: mutex_lock(&ggtt->error_mutex);
drivers/gpu/drm/i915/i915_gpu_error.c:1183: if (ggtt->vm.raw_insert_page)
drivers/gpu/drm/i915/i915_gpu_error.c:1184: ggtt->vm.raw_insert_page(&ggtt->vm, dma, slot,
drivers/gpu/drm/i915/i915_gpu_error.c:1189: ggtt->vm.insert_page(&ggtt->vm, dma, slot,
drivers/gpu/drm/i915/i915_gpu_error.c:1195: s = io_mapping_map_wc(&ggtt->iomap, slot, PAGE_SIZE);
drivers/gpu/drm/i915/i915_gpu_error.c:1202: ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
drivers/gpu/drm/i915/i915_gpu_error.c:1203: mutex_unlock(&ggtt->error_mutex);
drivers/gpu/drm/i915/i915_gpu_error.c:1265: struct i915_ggtt *ggtt = gt->_gt->ggtt;
drivers/gpu/drm/i915/i915_gpu_error.c:1270: for (i = 0; i < ggtt->num_fences; i++)
drivers/gpu/drm/i915/i915_gpu_error.c:1275: for (i = 0; i < ggtt->num_fences; i++)
drivers/gpu/drm/i915/i915_gpu_error.c:1280: for (i = 0; i < ggtt->num_fences; i++)
drivers/gpu/drm/i915/i915_hdcp_gsc.c:65: vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/i915_initial_plane.c:165: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_initial_plane.c:173: ret = i915_gem_gtt_reserve(&ggtt->vm, NULL, &orig_mm,
drivers/gpu/drm/i915/i915_initial_plane.c:180: vma = i915_vma_instance(obj, &to_gt(i915)->ggtt->vm, NULL);
drivers/gpu/drm/i915/i915_initial_plane.c:38: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/i915_initial_plane.c:56: dma_addr = intel_ggtt_read_entry(&ggtt->vm, base, &is_present, &is_local);
drivers/gpu/drm/i915/i915_perf.c:1376: scratch = __vm_create_scratch_for_read_pinned(&ce->engine->gt->ggtt->vm, 4);
drivers/gpu/drm/i915/i915_perf.c:1875: vma = i915_vma_instance(bo, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/i915_perf.c:1986: vma = i915_vma_instance(bo, &gt->ggtt->vm, NULL);
drivers/gpu/drm/i915/i915_perf.c:2252: &stream->engine->gt->ggtt->vm,
drivers/gpu/drm/i915/i915_request.c:1198: return to->engine->gt->ggtt == from->engine->gt->ggtt;
drivers/gpu/drm/i915/i915_vgpu.c:150: static void vgt_deballoon_space(struct i915_ggtt *ggtt,
drivers/gpu/drm/i915/i915_vgpu.c:153: struct drm_i915_private *dev_priv = ggtt->vm.i915;
drivers/gpu/drm/i915/i915_vgpu.c:163: ggtt->vm.reserved -= node->size;
drivers/gpu/drm/i915/i915_vgpu.c:174: void intel_vgt_deballoon(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/i915_vgpu.c:176: struct drm_i915_private *dev_priv = ggtt->vm.i915;
drivers/gpu/drm/i915/i915_vgpu.c:179: if (!intel_vgpu_active(ggtt->vm.i915))
drivers/gpu/drm/i915/i915_vgpu.c:185: vgt_deballoon_space(ggtt, &bl_info.space[i]);
drivers/gpu/drm/i915/i915_vgpu.c:188: static int vgt_balloon_space(struct i915_ggtt *ggtt,
drivers/gpu/drm/i915/i915_vgpu.c:192: struct drm_i915_private *dev_priv = ggtt->vm.i915;
drivers/gpu/drm/i915/i915_vgpu.c:202: ret = i915_gem_gtt_reserve(&ggtt->vm, NULL, node,
drivers/gpu/drm/i915/i915_vgpu.c:206: ggtt->vm.reserved += size;
drivers/gpu/drm/i915/i915_vgpu.c:255: int intel_vgt_balloon(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/i915_vgpu.c:257: struct drm_i915_private *dev_priv = ggtt->vm.i915;
drivers/gpu/drm/i915/i915_vgpu.c:259: unsigned long ggtt_end = ggtt->vm.total;
drivers/gpu/drm/i915/i915_vgpu.c:265: if (!intel_vgpu_active(ggtt->vm.i915))
drivers/gpu/drm/i915/i915_vgpu.c:288: if (mappable_end > ggtt->mappable_end ||
drivers/gpu/drm/i915/i915_vgpu.c:289: unmappable_base < ggtt->mappable_end ||
drivers/gpu/drm/i915/i915_vgpu.c:296: if (unmappable_base > ggtt->mappable_end) {
drivers/gpu/drm/i915/i915_vgpu.c:297: ret = vgt_balloon_space(ggtt, &bl_info.space[2],
drivers/gpu/drm/i915/i915_vgpu.c:298: ggtt->mappable_end, unmappable_base);
drivers/gpu/drm/i915/i915_vgpu.c:305: ret = vgt_balloon_space(ggtt, &bl_info.space[3],
drivers/gpu/drm/i915/i915_vgpu.c:313: ret = vgt_balloon_space(ggtt, &bl_info.space[0],
drivers/gpu/drm/i915/i915_vgpu.c:320: if (mappable_end < ggtt->mappable_end) {
drivers/gpu/drm/i915/i915_vgpu.c:321: ret = vgt_balloon_space(ggtt, &bl_info.space[1],
drivers/gpu/drm/i915/i915_vgpu.c:322: mappable_end, ggtt->mappable_end);
drivers/gpu/drm/i915/i915_vgpu.c:332: vgt_deballoon_space(ggtt, &bl_info.space[0]);
drivers/gpu/drm/i915/i915_vgpu.c:334: vgt_deballoon_space(ggtt, &bl_info.space[3]);
drivers/gpu/drm/i915/i915_vgpu.c:336: vgt_deballoon_space(ggtt, &bl_info.space[2]);
drivers/gpu/drm/i915/i915_vgpu.h:39: int intel_vgt_balloon(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/i915_vgpu.h:40: void intel_vgt_deballoon(struct i915_ggtt *ggtt);
drivers/gpu/drm/i915/i915_vma.c:158: GEM_BUG_ON(vm == &vm->gt->ggtt->alias->vm);
drivers/gpu/drm/i915/i915_vma.c:1660: struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
drivers/gpu/drm/i915/i915_vma.c:1676: list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)
drivers/gpu/drm/i915/selftests/i915_gem.c:107: i915_ggtt_suspend(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/selftests/i915_gem.c:117: i915_ggtt_suspend(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/selftests/i915_gem.c:133: i915_ggtt_resume(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/selftests/i915_gem.c:45: struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem.c:46: const u64 slot = ggtt->error_capture.start;
drivers/gpu/drm/i915/selftests/i915_gem.c:52: if (!i915_ggtt_has_aperture(ggtt))
drivers/gpu/drm/i915/selftests/i915_gem.c:62: ggtt->vm.insert_page(&ggtt->vm, dma, slot,
drivers/gpu/drm/i915/selftests/i915_gem.c:67: s = io_mapping_map_atomic_wc(&ggtt->iomap, slot);
drivers/gpu/drm/i915/selftests/i915_gem.c:74: ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
104
i915_gem_drain_freed_objects(ggtt->vm.i915);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
110
struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
116
err = populate_ggtt(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
121
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
122
err = i915_gem_evict_something(&ggtt->vm, NULL,
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
126
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
133
unpin_ggtt(ggtt);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
136
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
137
err = i915_gem_evict_something(&ggtt->vm, NULL,
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
141
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
149
cleanup_objects(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
156
struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
166
err = populate_ggtt(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
186
cleanup_objects(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
193
struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
203
err = populate_ggtt(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
208
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
209
err = i915_gem_evict_for_node(&ggtt->vm, NULL, &target, 0);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
210
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
217
unpin_ggtt(ggtt);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
220
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
221
err = i915_gem_evict_for_node(&ggtt->vm, NULL, &target, 0);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
222
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
230
cleanup_objects(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
244
struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
262
ggtt->vm.mm.color_adjust = mock_color_adjust;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
263
GEM_BUG_ON(!i915_vm_has_cache_coloring(&ggtt->vm));
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
301
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
302
err = i915_gem_evict_for_node(&ggtt->vm, NULL, &target, 0);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
303
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
314
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
315
err = i915_gem_evict_for_node(&ggtt->vm, NULL, &target, 0);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
316
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
326
unpin_ggtt(ggtt);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
327
cleanup_objects(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
328
ggtt->vm.mm.color_adjust = NULL;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
335
struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
342
err = populate_ggtt(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
347
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
348
err = i915_gem_evict_vm(&ggtt->vm, NULL, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
349
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
356
unpin_ggtt(ggtt);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
359
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
360
err = i915_gem_evict_vm(&ggtt->vm, &ww, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
361
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
371
cleanup_objects(ggtt, &objects);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
379
struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
410
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
411
err = i915_gem_gtt_insert(&ggtt->vm, NULL, &hole,
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
413
0, ggtt->vm.total,
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
423
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
425
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
431
if (i915_gem_gtt_insert(&ggtt->vm, NULL, &r->node,
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
433
0, ggtt->vm.total,
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
445
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
48
static int populate_ggtt(struct i915_ggtt *ggtt, struct list_head *objects)
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
517
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
531
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
57
obj = i915_gem_object_create_internal(ggtt->vm.i915,
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
75
count, ggtt->vm.total / PAGE_SIZE);
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
77
if (list_empty(&ggtt->vm.bound_list)) {
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
85
static void unpin_ggtt(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
89
list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)
drivers/gpu/drm/i915/selftests/i915_gem_evict.c
94
static void cleanup_objects(struct i915_ggtt *ggtt, struct list_head *list)
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1288
struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1295
list_sort(NULL, &ggtt->vm.mm.hole_stack, sort_holes);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1296
drm_mm_for_each_hole(node, &ggtt->vm.mm, hole_start, hole_end) {
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1300
if (ggtt->vm.mm.color_adjust)
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1301
ggtt->vm.mm.color_adjust(node, 0,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1306
err = func(&ggtt->vm, hole_start, hole_end, end_time);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1353
struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1360
if (!i915_ggtt_has_aperture(ggtt))
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1372
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1373
err = drm_mm_insert_node_in_range(&ggtt->vm.mm, &tmp,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1376
0, ggtt->mappable_end,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1378
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1387
ggtt->vm.insert_page(&ggtt->vm,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1405
vaddr = io_mapping_map_atomic_wc(&ggtt->iomap, offset);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1409
intel_gt_flush_ggtt_writes(ggtt->vm.gt);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1417
vaddr = io_mapping_map_atomic_wc(&ggtt->iomap, offset);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1431
ggtt->vm.clear_range(&ggtt->vm, tmp.start, tmp.size);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1433
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1435
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1485
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1487
return exercise_mock(ggtt->vm.i915, fill_hole);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1492
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1494
return exercise_mock(ggtt->vm.i915, walk_hole);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1499
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1501
return exercise_mock(ggtt->vm.i915, pot_hole);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1506
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1508
return exercise_mock(ggtt->vm.i915, drunk_hole);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1540
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1555
total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1559
obj = i915_gem_object_create_internal(ggtt->vm.i915,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1573
vma = i915_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1582
total, ggtt->vm.total, err);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1600
total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1604
obj = i915_gem_object_create_internal(ggtt->vm.i915,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1619
vma = i915_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1628
total, ggtt->vm.total, err);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1649
vma = i915_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1662
0, ggtt->vm.total,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1669
total, ggtt->vm.total, err);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1720
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1729
ggtt->vm.total + I915_GTT_PAGE_SIZE, 0,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1730
0, ggtt->vm.total,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1761
mutex_lock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1762
err = i915_gem_gtt_insert(&ggtt->vm, NULL, &tmp,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1767
mutex_unlock(&ggtt->vm.mutex);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1778
total + I915_GTT_PAGE_SIZE <= ggtt->vm.total;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1782
obj = i915_gem_object_create_internal(ggtt->vm.i915,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1797
vma = i915_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1811
total, ggtt->vm.total, err);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1823
vma = i915_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1843
vma = i915_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1861
total, ggtt->vm.total, err);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1877
total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total;
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1881
obj = i915_gem_object_create_internal(ggtt->vm.i915,
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1896
vma = i915_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1905
total, ggtt->vm.total, err);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1948
err = i915_subtests(tests, gt->ggtt);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1952
mock_fini_ggtt(gt->ggtt);
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1980
GEM_BUG_ON(offset_in_page(to_gt(i915)->ggtt->vm.total));
drivers/gpu/drm/i915/selftests/i915_vma.c
151
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_vma.c
152
struct drm_i915_private *i915 = ggtt->vm.i915;
drivers/gpu/drm/i915/selftests/i915_vma.c
259
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_vma.c
270
VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
271
VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
272
VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
274
VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
275
INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
drivers/gpu/drm/i915/selftests/i915_vma.c
276
VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
277
INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total),
drivers/gpu/drm/i915/selftests/i915_vma.c
282
VALID(ggtt->mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
drivers/gpu/drm/i915/selftests/i915_vma.c
283
VALID(ggtt->mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
drivers/gpu/drm/i915/selftests/i915_vma.c
284
NOSPACE(ggtt->mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
drivers/gpu/drm/i915/selftests/i915_vma.c
285
VALID(ggtt->vm.total - 4096, PIN_GLOBAL),
drivers/gpu/drm/i915/selftests/i915_vma.c
286
VALID(ggtt->vm.total, PIN_GLOBAL),
drivers/gpu/drm/i915/selftests/i915_vma.c
287
NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL),
drivers/gpu/drm/i915/selftests/i915_vma.c
289
INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
290
INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
293
VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
302
NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | ggtt->mappable_end),
drivers/gpu/drm/i915/selftests/i915_vma.c
303
NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total),
drivers/gpu/drm/i915/selftests/i915_vma.c
304
NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
305
NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
drivers/gpu/drm/i915/selftests/i915_vma.c
322
GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm));
drivers/gpu/drm/i915/selftests/i915_vma.c
324
obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE);
drivers/gpu/drm/i915/selftests/i915_vma.c
328
vma = checked_vma_instance(obj, &ggtt->vm, NULL);
drivers/gpu/drm/i915/selftests/i915_vma.c
549
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_vma.c
550
struct i915_address_space *vm = &ggtt->vm;
drivers/gpu/drm/i915/selftests/i915_vma.c
796
struct i915_ggtt *ggtt = arg;
drivers/gpu/drm/i915/selftests/i915_vma.c
797
struct i915_address_space *vm = &ggtt->vm;
drivers/gpu/drm/i915/selftests/i915_vma.c
947
err = i915_subtests(tests, gt->ggtt);
drivers/gpu/drm/i915/selftests/i915_vma.c
951
mock_fini_ggtt(gt->ggtt);
drivers/gpu/drm/i915/selftests/i915_vma.c
993
if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))
drivers/gpu/drm/i915/selftests/mock_gem_device.c
238
to_gt(i915)->vm = i915_vm_get(&to_gt(i915)->ggtt->vm);
drivers/gpu/drm/i915/selftests/mock_gem_device.c
74
mock_fini_ggtt(to_gt(i915)->ggtt);
drivers/gpu/drm/i915/selftests/mock_gtt.c
109
struct i915_ggtt *ggtt = gt->ggtt;
drivers/gpu/drm/i915/selftests/mock_gtt.c
111
ggtt->vm.gt = gt;
drivers/gpu/drm/i915/selftests/mock_gtt.c
112
ggtt->vm.i915 = gt->i915;
drivers/gpu/drm/i915/selftests/mock_gtt.c
113
ggtt->vm.is_ggtt = true;
drivers/gpu/drm/i915/selftests/mock_gtt.c
115
ggtt->gmadr = DEFINE_RES_MEM(0, 2048 * PAGE_SIZE);
drivers/gpu/drm/i915/selftests/mock_gtt.c
116
ggtt->mappable_end = resource_size(&ggtt->gmadr);
drivers/gpu/drm/i915/selftests/mock_gtt.c
117
ggtt->vm.total = 4096 * PAGE_SIZE;
drivers/gpu/drm/i915/selftests/mock_gtt.c
119
ggtt->vm.alloc_pt_dma = alloc_pt_dma;
drivers/gpu/drm/i915/selftests/mock_gtt.c
120
ggtt->vm.alloc_scratch_dma = alloc_pt_dma;
drivers/gpu/drm/i915/selftests/mock_gtt.c
122
ggtt->vm.clear_range = mock_clear_range;
drivers/gpu/drm/i915/selftests/mock_gtt.c
123
ggtt->vm.insert_page = mock_insert_page;
drivers/gpu/drm/i915/selftests/mock_gtt.c
124
ggtt->vm.insert_entries = mock_insert_entries;
drivers/gpu/drm/i915/selftests/mock_gtt.c
125
ggtt->vm.cleanup = mock_cleanup;
drivers/gpu/drm/i915/selftests/mock_gtt.c
127
ggtt->vm.vma_ops.bind_vma = mock_bind_ggtt;
drivers/gpu/drm/i915/selftests/mock_gtt.c
128
ggtt->vm.vma_ops.unbind_vma = mock_unbind_ggtt;
drivers/gpu/drm/i915/selftests/mock_gtt.c
130
i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT);
drivers/gpu/drm/i915/selftests/mock_gtt.c
133
void mock_fini_ggtt(struct i915_ggtt *ggtt)
drivers/gpu/drm/i915/selftests/mock_gtt.c
135
i915_address_space_fini(&ggtt->vm);
drivers/gpu/drm/i915/selftests/mock_gtt.h
33
void mock_fini_ggtt(struct i915_ggtt *ggtt);
drivers/gpu/drm/xe/display/xe_fb_pin.c
132
u64 pte = xe_ggtt_encode_pte_flags(ggtt, bo, xe->pat.idx[XE_CACHE_NONE]);
drivers/gpu/drm/xe/display/xe_fb_pin.c
174
write_ggtt_rotated(struct xe_ggtt *ggtt, u32 *ggtt_ofs,
drivers/gpu/drm/xe/display/xe_fb_pin.c
188
write_pte(ggtt, *ggtt_ofs, pte_flags | addr);
drivers/gpu/drm/xe/display/xe_fb_pin.c
203
static void write_ggtt_rotated_node(struct xe_ggtt *ggtt, struct xe_ggtt_node *node,
drivers/gpu/drm/xe/display/xe_fb_pin.c
212
write_ggtt_rotated(ggtt, &ggtt_ofs, pte_flags, write_pte,
drivers/gpu/drm/xe/display/xe_fb_pin.c
229
struct xe_ggtt *ggtt = tile0->mem.ggtt;
drivers/gpu/drm/xe/display/xe_fb_pin.c
258
pte = xe_ggtt_encode_pte_flags(ggtt, bo, xe->pat.idx[XE_CACHE_NONE]);
drivers/gpu/drm/xe/display/xe_fb_pin.c
259
vma->node = xe_ggtt_node_insert_transform(ggtt, bo, pte,
drivers/gpu/drm/xe/display/xe_fb_pin.c
26
struct xe_ggtt *ggtt = xe_device_get_root_tile(xe)->mem.ggtt;
drivers/gpu/drm/xe/display/xe_fb_pin.c
28
u64 pte = xe_ggtt_encode_pte_flags(ggtt, bo, xe->pat.idx[XE_CACHE_NONE]);
drivers/gpu/drm/xe/display/xe_fb_pin.c
59
struct xe_ggtt *ggtt = xe_device_get_root_tile(xe)->mem.ggtt;
drivers/gpu/drm/xe/display/xe_fb_pin.c
61
u64 pte = xe_ggtt_encode_pte_flags(ggtt, bo, xe->pat.idx[XE_CACHE_NONE]);
drivers/gpu/drm/xe/display/xe_fb_pin.c
89
struct xe_ggtt *ggtt = tile0->mem.ggtt;
drivers/gpu/drm/xe/display/xe_initial_plane.c
60
u64 pte = xe_ggtt_read_pte(tile0->mem.ggtt, base);
drivers/gpu/drm/xe/tests/xe_guc_buf_kunit.c
39
struct xe_ggtt *ggtt = tile->mem.ggtt;
drivers/gpu/drm/xe/tests/xe_guc_buf_kunit.c
41
bo->ggtt_node[tile->id] = xe_ggtt_node_init(ggtt);
drivers/gpu/drm/xe/tests/xe_guc_buf_kunit.c
59
struct xe_ggtt *ggtt;
drivers/gpu/drm/xe/tests/xe_guc_buf_kunit.c
65
ggtt = xe_device_get_root_tile(test->priv)->mem.ggtt;
drivers/gpu/drm/xe/tests/xe_guc_buf_kunit.c
69
xe_ggtt_init_kunit(ggtt, DUT_GGTT_START,
drivers/gpu/drm/xe/xe_bo.c
1718
xe_ggtt_remove_bo(tile->mem.ggtt, bo);
drivers/gpu/drm/xe/xe_bo.c
2368
err = xe_ggtt_insert_bo_at(t->mem.ggtt, bo,
drivers/gpu/drm/xe/xe_bo.c
2372
err = xe_ggtt_insert_bo(t->mem.ggtt, bo, exec);
drivers/gpu/drm/xe/xe_bo.c
3612
xe_ggtt_might_lock(tile->mem.ggtt);
drivers/gpu/drm/xe/xe_bo_evict.c
208
xe_ggtt_map_bo_unlocked(tile->mem.ggtt, bo);
drivers/gpu/drm/xe/xe_device.c
878
err = xe_ggtt_init_early(tile->mem.ggtt);
drivers/gpu/drm/xe/xe_device_types.h
200
struct xe_ggtt *ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
1003
static void xe_ggtt_assign_locked(struct xe_ggtt *ggtt, const struct drm_mm_node *node, u16 vfid)
drivers/gpu/drm/xe/xe_ggtt.c
1010
lockdep_assert_held(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1016
ggtt->pt_ops->ggtt_set_pte(ggtt, start, pte);
drivers/gpu/drm/xe/xe_ggtt.c
1020
xe_ggtt_invalidate(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
1034
mutex_lock(&node->ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1035
xe_ggtt_assign_locked(node->ggtt, &node->base, vfid);
drivers/gpu/drm/xe/xe_ggtt.c
1036
mutex_unlock(&node->ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1050
struct xe_ggtt *ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
1058
guard(mutex)(&node->ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1063
ggtt = node->ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
1068
pte = ggtt->pt_ops->ggtt_get_pte(ggtt, start);
drivers/gpu/drm/xe/xe_ggtt.c
1092
struct xe_ggtt *ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
1098
guard(mutex)(&node->ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1103
ggtt = node->ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
1109
ggtt->pt_ops->ggtt_set_pte(ggtt, start, vfid_pte);
drivers/gpu/drm/xe/xe_ggtt.c
1112
xe_ggtt_invalidate(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
1126
int xe_ggtt_dump(struct xe_ggtt *ggtt, struct drm_printer *p)
drivers/gpu/drm/xe/xe_ggtt.c
1130
err = mutex_lock_interruptible(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1134
drm_mm_print(&ggtt->mm, p);
drivers/gpu/drm/xe/xe_ggtt.c
1135
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1149
u64 xe_ggtt_print_holes(struct xe_ggtt *ggtt, u64 alignment, struct drm_printer *p)
drivers/gpu/drm/xe/xe_ggtt.c
1151
const struct drm_mm *mm = &ggtt->mm;
drivers/gpu/drm/xe/xe_ggtt.c
1157
mutex_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1159
hole_start = max(hole_start, ggtt->start);
drivers/gpu/drm/xe/xe_ggtt.c
1172
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
1186
u64 xe_ggtt_encode_pte_flags(struct xe_ggtt *ggtt,
drivers/gpu/drm/xe/xe_ggtt.c
1189
return ggtt->pt_ops->pte_encode_flags(bo, pat_index);
drivers/gpu/drm/xe/xe_ggtt.c
1199
u64 xe_ggtt_read_pte(struct xe_ggtt *ggtt, u64 offset)
drivers/gpu/drm/xe/xe_ggtt.c
1201
return ioread64(ggtt->gsm + (offset / XE_PAGE_SIZE));
drivers/gpu/drm/xe/xe_ggtt.c
124
static void ggtt_update_access_counter(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
126
struct xe_tile *tile = ggtt->tile;
drivers/gpu/drm/xe/xe_ggtt.c
149
lockdep_assert_held(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
151
if ((++ggtt->access_count % max_gtt_writes) == 0) {
drivers/gpu/drm/xe/xe_ggtt.c
153
ggtt->access_count = 0;
drivers/gpu/drm/xe/xe_ggtt.c
163
u64 xe_ggtt_start(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
165
return ggtt->start;
drivers/gpu/drm/xe/xe_ggtt.c
174
u64 xe_ggtt_size(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
176
return ggtt->size;
drivers/gpu/drm/xe/xe_ggtt.c
179
static void xe_ggtt_set_pte(struct xe_ggtt *ggtt, u64 addr, u64 pte)
drivers/gpu/drm/xe/xe_ggtt.c
181
xe_tile_assert(ggtt->tile, !(addr & XE_PTE_MASK));
drivers/gpu/drm/xe/xe_ggtt.c
182
xe_tile_assert(ggtt->tile, addr < ggtt->start + ggtt->size);
drivers/gpu/drm/xe/xe_ggtt.c
184
writeq(pte, &ggtt->gsm[addr >> XE_PTE_SHIFT]);
drivers/gpu/drm/xe/xe_ggtt.c
187
static void xe_ggtt_set_pte_and_flush(struct xe_ggtt *ggtt, u64 addr, u64 pte)
drivers/gpu/drm/xe/xe_ggtt.c
189
xe_ggtt_set_pte(ggtt, addr, pte);
drivers/gpu/drm/xe/xe_ggtt.c
190
ggtt_update_access_counter(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
193
static u64 xe_ggtt_get_pte(struct xe_ggtt *ggtt, u64 addr)
drivers/gpu/drm/xe/xe_ggtt.c
195
xe_tile_assert(ggtt->tile, !(addr & XE_PTE_MASK));
drivers/gpu/drm/xe/xe_ggtt.c
196
xe_tile_assert(ggtt->tile, addr < ggtt->size);
drivers/gpu/drm/xe/xe_ggtt.c
198
return readq(&ggtt->gsm[addr >> XE_PTE_SHIFT]);
drivers/gpu/drm/xe/xe_ggtt.c
201
static void xe_ggtt_clear(struct xe_ggtt *ggtt, u64 start, u64 size)
drivers/gpu/drm/xe/xe_ggtt.c
203
u16 pat_index = tile_to_xe(ggtt->tile)->pat.idx[XE_CACHE_WB];
drivers/gpu/drm/xe/xe_ggtt.c
207
xe_tile_assert(ggtt->tile, start < end);
drivers/gpu/drm/xe/xe_ggtt.c
209
if (ggtt->scratch)
drivers/gpu/drm/xe/xe_ggtt.c
210
scratch_pte = xe_bo_addr(ggtt->scratch, 0, XE_PAGE_SIZE) |
drivers/gpu/drm/xe/xe_ggtt.c
211
ggtt->pt_ops->pte_encode_flags(ggtt->scratch,
drivers/gpu/drm/xe/xe_ggtt.c
217
ggtt->pt_ops->ggtt_set_pte(ggtt, start, scratch_pte);
drivers/gpu/drm/xe/xe_ggtt.c
222
static void primelockdep(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
228
might_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
243
struct xe_ggtt *ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
245
ggtt = drmm_kzalloc(&xe->drm, sizeof(*ggtt), GFP_KERNEL);
drivers/gpu/drm/xe/xe_ggtt.c
246
if (!ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
249
if (drmm_mutex_init(&xe->drm, &ggtt->lock))
drivers/gpu/drm/xe/xe_ggtt.c
252
primelockdep(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
253
ggtt->tile = tile;
drivers/gpu/drm/xe/xe_ggtt.c
255
return ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
260
struct xe_ggtt *ggtt = arg;
drivers/gpu/drm/xe/xe_ggtt.c
262
destroy_workqueue(ggtt->wq);
drivers/gpu/drm/xe/xe_ggtt.c
263
drm_mm_takedown(&ggtt->mm);
drivers/gpu/drm/xe/xe_ggtt.c
268
struct xe_ggtt *ggtt = arg;
drivers/gpu/drm/xe/xe_ggtt.c
270
ggtt->scratch = NULL;
drivers/gpu/drm/xe/xe_ggtt.c
274
void xe_ggtt_might_lock(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
276
might_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
298
static void __xe_ggtt_init_early(struct xe_ggtt *ggtt, u64 start, u64 size)
drivers/gpu/drm/xe/xe_ggtt.c
300
ggtt->start = start;
drivers/gpu/drm/xe/xe_ggtt.c
301
ggtt->size = size;
drivers/gpu/drm/xe/xe_ggtt.c
302
drm_mm_init(&ggtt->mm, start, size);
drivers/gpu/drm/xe/xe_ggtt.c
305
int xe_ggtt_init_kunit(struct xe_ggtt *ggtt, u32 start, u32 size)
drivers/gpu/drm/xe/xe_ggtt.c
307
__xe_ggtt_init_early(ggtt, start, size);
drivers/gpu/drm/xe/xe_ggtt.c
314
struct xe_ggtt *ggtt = arg;
drivers/gpu/drm/xe/xe_ggtt.c
316
scoped_guard(mutex, &ggtt->lock)
drivers/gpu/drm/xe/xe_ggtt.c
317
ggtt->flags &= ~XE_GGTT_FLAGS_ONLINE;
drivers/gpu/drm/xe/xe_ggtt.c
318
drain_workqueue(ggtt->wq);
drivers/gpu/drm/xe/xe_ggtt.c
332
int xe_ggtt_init_early(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
334
struct xe_device *xe = tile_to_xe(ggtt->tile);
drivers/gpu/drm/xe/xe_ggtt.c
346
xe_tile_err(ggtt->tile, "Hardware reported no preallocated GSM\n");
drivers/gpu/drm/xe/xe_ggtt.c
357
ggtt->gsm = ggtt->tile->mmio.regs + SZ_8M;
drivers/gpu/drm/xe/xe_ggtt.c
359
ggtt->flags |= XE_GGTT_FLAGS_64K;
drivers/gpu/drm/xe/xe_ggtt.c
365
ggtt->pt_ops =
drivers/gpu/drm/xe/xe_ggtt.c
366
(ggtt->tile->media_gt && XE_GT_WA(ggtt->tile->media_gt, 22019338487)) ||
drivers/gpu/drm/xe/xe_ggtt.c
367
(ggtt->tile->primary_gt && XE_GT_WA(ggtt->tile->primary_gt, 22019338487)) ?
drivers/gpu/drm/xe/xe_ggtt.c
370
ggtt->pt_ops = &xelp_pt_ops;
drivers/gpu/drm/xe/xe_ggtt.c
372
ggtt->wq = alloc_workqueue("xe-ggtt-wq", WQ_MEM_RECLAIM, 0);
drivers/gpu/drm/xe/xe_ggtt.c
373
if (!ggtt->wq)
drivers/gpu/drm/xe/xe_ggtt.c
376
__xe_ggtt_init_early(ggtt, ggtt_start, ggtt_size);
drivers/gpu/drm/xe/xe_ggtt.c
378
err = drmm_add_action_or_reset(&xe->drm, ggtt_fini_early, ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
382
ggtt->flags |= XE_GGTT_FLAGS_ONLINE;
drivers/gpu/drm/xe/xe_ggtt.c
383
err = devm_add_action_or_reset(xe->drm.dev, dev_fini_ggtt, ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
388
err = xe_tile_sriov_vf_prepare_ggtt(ggtt->tile);
drivers/gpu/drm/xe/xe_ggtt.c
397
static void xe_ggtt_invalidate(struct xe_ggtt *ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
399
static void xe_ggtt_initial_clear(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
405
mutex_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
406
drm_mm_for_each_hole(hole, &ggtt->mm, start, end)
drivers/gpu/drm/xe/xe_ggtt.c
407
xe_ggtt_clear(ggtt, start, end - start);
drivers/gpu/drm/xe/xe_ggtt.c
409
xe_ggtt_invalidate(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
410
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
415
struct xe_ggtt *ggtt = node->ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
418
mutex_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
419
bound = ggtt->flags & XE_GGTT_FLAGS_ONLINE;
drivers/gpu/drm/xe/xe_ggtt.c
421
xe_ggtt_clear(ggtt, node->base.start, node->base.size);
drivers/gpu/drm/xe/xe_ggtt.c
424
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
430
xe_ggtt_invalidate(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
440
struct xe_device *xe = tile_to_xe(node->ggtt->tile);
drivers/gpu/drm/xe/xe_ggtt.c
453
struct xe_ggtt *ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
456
if (!node || !node->ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
459
ggtt = node->ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
460
xe = tile_to_xe(ggtt->tile);
drivers/gpu/drm/xe/xe_ggtt.c
468
queue_work(ggtt->wq, &node->delayed_removal_work);
drivers/gpu/drm/xe/xe_ggtt.c
478
int xe_ggtt_init(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
480
struct xe_device *xe = tile_to_xe(ggtt->tile);
drivers/gpu/drm/xe/xe_ggtt.c
490
if (ggtt->flags & XE_GGTT_FLAGS_64K)
drivers/gpu/drm/xe/xe_ggtt.c
493
flags |= XE_BO_FLAG_VRAM_IF_DGFX(ggtt->tile);
drivers/gpu/drm/xe/xe_ggtt.c
495
ggtt->scratch = xe_managed_bo_create_pin_map(xe, ggtt->tile, XE_PAGE_SIZE, flags);
drivers/gpu/drm/xe/xe_ggtt.c
496
if (IS_ERR(ggtt->scratch)) {
drivers/gpu/drm/xe/xe_ggtt.c
497
err = PTR_ERR(ggtt->scratch);
drivers/gpu/drm/xe/xe_ggtt.c
501
xe_map_memset(xe, &ggtt->scratch->vmap, 0, 0, xe_bo_size(ggtt->scratch));
drivers/gpu/drm/xe/xe_ggtt.c
503
xe_ggtt_initial_clear(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
505
return devm_add_action_or_reset(xe->drm.dev, ggtt_fini, ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
507
ggtt->scratch = NULL;
drivers/gpu/drm/xe/xe_ggtt.c
522
static void xe_ggtt_invalidate(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
524
struct xe_device *xe = tile_to_xe(ggtt->tile);
drivers/gpu/drm/xe/xe_ggtt.c
535
ggtt_invalidate_gt_tlb(ggtt->tile->primary_gt);
drivers/gpu/drm/xe/xe_ggtt.c
536
ggtt_invalidate_gt_tlb(ggtt->tile->media_gt);
drivers/gpu/drm/xe/xe_ggtt.c
539
static void xe_ggtt_dump_node(struct xe_ggtt *ggtt,
drivers/gpu/drm/xe/xe_ggtt.c
546
xe_tile_dbg(ggtt->tile, "GGTT %#llx-%#llx (%s) %s\n",
drivers/gpu/drm/xe/xe_ggtt.c
564
struct xe_ggtt *ggtt = node->ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
567
xe_tile_assert(ggtt->tile, start < end);
drivers/gpu/drm/xe/xe_ggtt.c
568
xe_tile_assert(ggtt->tile, IS_ALIGNED(start, XE_PAGE_SIZE));
drivers/gpu/drm/xe/xe_ggtt.c
569
xe_tile_assert(ggtt->tile, IS_ALIGNED(end, XE_PAGE_SIZE));
drivers/gpu/drm/xe/xe_ggtt.c
570
xe_tile_assert(ggtt->tile, !drm_mm_node_allocated(&node->base));
drivers/gpu/drm/xe/xe_ggtt.c
571
lockdep_assert_held(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
577
err = drm_mm_reserve_node(&ggtt->mm, &node->base);
drivers/gpu/drm/xe/xe_ggtt.c
579
if (xe_tile_WARN(ggtt->tile, err, "Failed to balloon GGTT %#llx-%#llx (%pe)\n",
drivers/gpu/drm/xe/xe_ggtt.c
583
xe_ggtt_dump_node(ggtt, &node->base, "balloon");
drivers/gpu/drm/xe/xe_ggtt.c
599
lockdep_assert_held(&node->ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
601
xe_ggtt_dump_node(node->ggtt, &node->base, "remove-balloon");
drivers/gpu/drm/xe/xe_ggtt.c
606
static void xe_ggtt_assert_fit(struct xe_ggtt *ggtt, u64 start, u64 size)
drivers/gpu/drm/xe/xe_ggtt.c
608
struct xe_tile *tile = ggtt->tile;
drivers/gpu/drm/xe/xe_ggtt.c
610
xe_tile_assert(tile, start >= ggtt->start);
drivers/gpu/drm/xe/xe_ggtt.c
611
xe_tile_assert(tile, start + size <= ggtt->start + ggtt->size);
drivers/gpu/drm/xe/xe_ggtt.c
629
void xe_ggtt_shift_nodes_locked(struct xe_ggtt *ggtt, s64 shift)
drivers/gpu/drm/xe/xe_ggtt.c
631
struct xe_tile *tile __maybe_unused = ggtt->tile;
drivers/gpu/drm/xe/xe_ggtt.c
635
lockdep_assert_held(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
638
drm_mm_for_each_node_safe(node, tmpn, &ggtt->mm)
drivers/gpu/drm/xe/xe_ggtt.c
639
xe_ggtt_assert_fit(ggtt, node->start + shift, node->size);
drivers/gpu/drm/xe/xe_ggtt.c
641
drm_mm_for_each_node_safe(node, tmpn, &ggtt->mm) {
drivers/gpu/drm/xe/xe_ggtt.c
649
drm_mm_reserve_node(&ggtt->mm, node);
drivers/gpu/drm/xe/xe_ggtt.c
657
return drm_mm_insert_node_generic(&node->ggtt->mm, &node->base, size, align, 0,
drivers/gpu/drm/xe/xe_ggtt.c
675
if (!node || !node->ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
678
mutex_lock(&node->ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
681
mutex_unlock(&node->ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
701
struct xe_ggtt_node *xe_ggtt_node_init(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
709
node->ggtt = ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
735
if (!node || !node->ggtt)
drivers/gpu/drm/xe/xe_ggtt.c
762
static void xe_ggtt_map_bo(struct xe_ggtt *ggtt, struct xe_ggtt_node *node,
drivers/gpu/drm/xe/xe_ggtt.c
779
ggtt->pt_ops->ggtt_set_pte(ggtt, end - cur.remaining,
drivers/gpu/drm/xe/xe_ggtt.c
78
struct xe_ggtt *ggtt;
drivers/gpu/drm/xe/xe_ggtt.c
787
ggtt->pt_ops->ggtt_set_pte(ggtt, end - cur.remaining,
drivers/gpu/drm/xe/xe_ggtt.c
799
void xe_ggtt_map_bo_unlocked(struct xe_ggtt *ggtt, struct xe_bo *bo)
drivers/gpu/drm/xe/xe_ggtt.c
802
u16 pat_index = tile_to_xe(ggtt->tile)->pat.idx[cache_mode];
drivers/gpu/drm/xe/xe_ggtt.c
805
mutex_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
806
pte = ggtt->pt_ops->pte_encode_flags(bo, pat_index);
drivers/gpu/drm/xe/xe_ggtt.c
807
xe_ggtt_map_bo(ggtt, bo->ggtt_node[ggtt->tile->id], bo, pte);
drivers/gpu/drm/xe/xe_ggtt.c
808
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
826
struct xe_ggtt_node *xe_ggtt_node_insert_transform(struct xe_ggtt *ggtt,
drivers/gpu/drm/xe/xe_ggtt.c
834
node = xe_ggtt_node_init(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
838
if (mutex_lock_interruptible(&ggtt->lock) < 0) {
drivers/gpu/drm/xe/xe_ggtt.c
848
transform(ggtt, node, pte_flags, ggtt->pt_ops->ggtt_set_pte, arg);
drivers/gpu/drm/xe/xe_ggtt.c
850
xe_ggtt_map_bo(ggtt, node, bo, pte_flags);
drivers/gpu/drm/xe/xe_ggtt.c
852
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
856
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
862
static int __xe_ggtt_insert_bo_at(struct xe_ggtt *ggtt, struct xe_bo *bo,
drivers/gpu/drm/xe/xe_ggtt.c
866
u8 tile_id = ggtt->tile->id;
drivers/gpu/drm/xe/xe_ggtt.c
869
if (xe_bo_is_vram(bo) && ggtt->flags & XE_GGTT_FLAGS_64K)
drivers/gpu/drm/xe/xe_ggtt.c
874
xe_tile_assert(ggtt->tile, bo->ggtt_node[tile_id]->base.size == xe_bo_size(bo));
drivers/gpu/drm/xe/xe_ggtt.c
882
xe_pm_runtime_get_noresume(tile_to_xe(ggtt->tile));
drivers/gpu/drm/xe/xe_ggtt.c
884
bo->ggtt_node[tile_id] = xe_ggtt_node_init(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
891
mutex_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
892
err = drm_mm_insert_node_in_range(&ggtt->mm, &bo->ggtt_node[tile_id]->base,
drivers/gpu/drm/xe/xe_ggtt.c
899
u16 pat_index = tile_to_xe(ggtt->tile)->pat.idx[cache_mode];
drivers/gpu/drm/xe/xe_ggtt.c
900
u64 pte = ggtt->pt_ops->pte_encode_flags(bo, pat_index);
drivers/gpu/drm/xe/xe_ggtt.c
902
xe_ggtt_map_bo(ggtt, bo->ggtt_node[tile_id], bo, pte);
drivers/gpu/drm/xe/xe_ggtt.c
904
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
907
xe_ggtt_invalidate(ggtt);
drivers/gpu/drm/xe/xe_ggtt.c
910
xe_pm_runtime_put(tile_to_xe(ggtt->tile));
drivers/gpu/drm/xe/xe_ggtt.c
925
int xe_ggtt_insert_bo_at(struct xe_ggtt *ggtt, struct xe_bo *bo,
drivers/gpu/drm/xe/xe_ggtt.c
928
return __xe_ggtt_insert_bo_at(ggtt, bo, start, end, exec);
drivers/gpu/drm/xe/xe_ggtt.c
939
int xe_ggtt_insert_bo(struct xe_ggtt *ggtt, struct xe_bo *bo,
drivers/gpu/drm/xe/xe_ggtt.c
942
return __xe_ggtt_insert_bo_at(ggtt, bo, 0, U64_MAX, exec);
drivers/gpu/drm/xe/xe_ggtt.c
950
void xe_ggtt_remove_bo(struct xe_ggtt *ggtt, struct xe_bo *bo)
drivers/gpu/drm/xe/xe_ggtt.c
952
u8 tile_id = ggtt->tile->id;
drivers/gpu/drm/xe/xe_ggtt.c
958
xe_tile_assert(ggtt->tile, bo->ggtt_node[tile_id]->base.size == xe_bo_size(bo));
drivers/gpu/drm/xe/xe_ggtt.c
972
u64 xe_ggtt_largest_hole(struct xe_ggtt *ggtt, u64 alignment, u64 *spare)
drivers/gpu/drm/xe/xe_ggtt.c
974
const struct drm_mm *mm = &ggtt->mm;
drivers/gpu/drm/xe/xe_ggtt.c
979
mutex_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.c
981
hole_start = max(hole_start, ggtt->start);
drivers/gpu/drm/xe/xe_ggtt.c
992
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_ggtt.h
16
int xe_ggtt_init_early(struct xe_ggtt *ggtt);
drivers/gpu/drm/xe/xe_ggtt.h
17
int xe_ggtt_init_kunit(struct xe_ggtt *ggtt, u32 reserved, u32 size);
drivers/gpu/drm/xe/xe_ggtt.h
18
int xe_ggtt_init(struct xe_ggtt *ggtt);
drivers/gpu/drm/xe/xe_ggtt.h
20
struct xe_ggtt_node *xe_ggtt_node_init(struct xe_ggtt *ggtt);
drivers/gpu/drm/xe/xe_ggtt.h
25
void xe_ggtt_shift_nodes_locked(struct xe_ggtt *ggtt, s64 shift);
drivers/gpu/drm/xe/xe_ggtt.h
26
u64 xe_ggtt_start(struct xe_ggtt *ggtt);
drivers/gpu/drm/xe/xe_ggtt.h
27
u64 xe_ggtt_size(struct xe_ggtt *ggtt);
drivers/gpu/drm/xe/xe_ggtt.h
31
xe_ggtt_node_insert_transform(struct xe_ggtt *ggtt,
drivers/gpu/drm/xe/xe_ggtt.h
38
void xe_ggtt_map_bo_unlocked(struct xe_ggtt *ggtt, struct xe_bo *bo);
drivers/gpu/drm/xe/xe_ggtt.h
39
int xe_ggtt_insert_bo(struct xe_ggtt *ggtt, struct xe_bo *bo, struct drm_exec *exec);
drivers/gpu/drm/xe/xe_ggtt.h
40
int xe_ggtt_insert_bo_at(struct xe_ggtt *ggtt, struct xe_bo *bo,
drivers/gpu/drm/xe/xe_ggtt.h
42
void xe_ggtt_remove_bo(struct xe_ggtt *ggtt, struct xe_bo *bo);
drivers/gpu/drm/xe/xe_ggtt.h
43
u64 xe_ggtt_largest_hole(struct xe_ggtt *ggtt, u64 alignment, u64 *spare);
drivers/gpu/drm/xe/xe_ggtt.h
45
int xe_ggtt_dump(struct xe_ggtt *ggtt, struct drm_printer *p);
drivers/gpu/drm/xe/xe_ggtt.h
46
u64 xe_ggtt_print_holes(struct xe_ggtt *ggtt, u64 alignment, struct drm_printer *p);
drivers/gpu/drm/xe/xe_ggtt.h
55
static inline void xe_ggtt_might_lock(struct xe_ggtt *ggtt)
drivers/gpu/drm/xe/xe_ggtt.h
58
void xe_ggtt_might_lock(struct xe_ggtt *ggtt);
drivers/gpu/drm/xe/xe_ggtt.h
61
u64 xe_ggtt_encode_pte_flags(struct xe_ggtt *ggtt, struct xe_bo *bo, u16 pat_index);
drivers/gpu/drm/xe/xe_ggtt.h
62
u64 xe_ggtt_read_pte(struct xe_ggtt *ggtt, u64 offset);
drivers/gpu/drm/xe/xe_ggtt_types.h
60
typedef void (*xe_ggtt_set_pte_fn)(struct xe_ggtt *ggtt, u64 addr, u64 pte);
drivers/gpu/drm/xe/xe_ggtt_types.h
61
typedef void (*xe_ggtt_transform_cb)(struct xe_ggtt *ggtt,
drivers/gpu/drm/xe/xe_ggtt_types.h
77
u64 (*ggtt_get_pte)(struct xe_ggtt *ggtt, u64 addr);
drivers/gpu/drm/xe/xe_gt.c
542
err = xe_ggtt_init(gt_to_tile(gt)->mem.ggtt);
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
3169
struct xe_ggtt *ggtt = gt_to_tile(gt)->mem.ggtt;
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
3179
total = xe_ggtt_print_holes(ggtt, alignment, p);
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
394
u64 ggtt_start = xe_ggtt_start(gt_to_tile(gt)->mem.ggtt);
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
395
u64 ggtt_size = xe_ggtt_size(gt_to_tile(gt)->mem.ggtt);
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
510
struct xe_ggtt *ggtt = tile->mem.ggtt;
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
536
node = xe_ggtt_node_init(ggtt);
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
715
struct xe_ggtt *ggtt = gt_to_tile(gt)->mem.ggtt;
drivers/gpu/drm/xe/xe_gt_sriov_pf_config.c
720
max_hole = xe_ggtt_largest_hole(ggtt, alignment, &spare);
drivers/gpu/drm/xe/xe_gt_sriov_vf.c
491
struct xe_ggtt *ggtt = tile->mem.ggtt;
drivers/gpu/drm/xe/xe_gt_sriov_vf.c
499
guard(mutex)(&ggtt->lock);
drivers/gpu/drm/xe/xe_guc_tlb_inval.c
222
.ggtt = send_tlb_inval_ggtt,
drivers/gpu/drm/xe/xe_oa.c
644
job->ggtt = true;
drivers/gpu/drm/xe/xe_ring_ops.c
230
if (job->q->vm && !job->ggtt)
drivers/gpu/drm/xe/xe_sched_job_types.h
67
bool ggtt;
drivers/gpu/drm/xe/xe_tile.c
94
tile->mem.ggtt = xe_ggtt_alloc(tile);
drivers/gpu/drm/xe/xe_tile.c
95
if (!tile->mem.ggtt)
drivers/gpu/drm/xe/xe_tile_debugfs.c
105
{ "ggtt", .show = xe_tile_debugfs_show_with_rpm, .data = ggtt },
drivers/gpu/drm/xe/xe_tile_debugfs.c
92
return xe_ggtt_dump(tile->mem.ggtt, p);
drivers/gpu/drm/xe/xe_tile_sriov_pf_debugfs.c
165
DEFINE_SRIOV_TILE_CONFIG_DEBUGFS_ATTRIBUTE(ggtt, ggtt, u64, "%llu\n");
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
102
struct xe_ggtt *ggtt = tile->mem.ggtt;
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
105
mutex_lock(&ggtt->lock);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
107
mutex_unlock(&ggtt->lock);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
126
mutex_lock(&tile->mem.ggtt->lock);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
128
mutex_unlock(&tile->mem.ggtt->lock);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
19
struct xe_ggtt *ggtt = tile->mem.ggtt;
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
23
tile->sriov.vf.ggtt_balloon[0] = xe_ggtt_node_init(ggtt);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
244
struct xe_ggtt *ggtt = tile->mem.ggtt;
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
246
lockdep_assert_held(&ggtt->lock);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
249
xe_ggtt_shift_nodes_locked(ggtt, shift);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
27
tile->sriov.vf.ggtt_balloon[1] = xe_ggtt_node_init(ggtt);
drivers/gpu/drm/xe/xe_tile_sriov_vf.c
53
lockdep_assert_held(&tile->mem.ggtt->lock);
drivers/gpu/drm/xe/xe_tlb_inval.c
303
ret = xe_tlb_inval_issue(tlb_inval, fence_ptr, tlb_inval->ops->ggtt);
drivers/gpu/drm/xe/xe_tlb_inval_types.h
35
int (*ggtt)(struct xe_tlb_inval *tlb_inval, u32 seqno);