sys/dev/pci/drm/amd/amdgpu/amdgpu_kms.c
829
mem.gtt.total_heap_size = gtt_man->size;
sys/dev/pci/drm/amd/amdgpu/amdgpu_kms.c
830
mem.gtt.usable_heap_size = mem.gtt.total_heap_size -
sys/dev/pci/drm/amd/amdgpu/amdgpu_kms.c
832
mem.gtt.heap_usage = ttm_resource_manager_usage(gtt_man);
sys/dev/pci/drm/amd/amdgpu/amdgpu_kms.c
833
mem.gtt.max_allocation = mem.gtt.usable_heap_size * 3 / 4;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1000
gtt->ttm.dma_address, flags);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1001
gtt->bound = true;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1017
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(bo->ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1047
gtt->offset = (u64)tmp->start << PAGE_SHIFT;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1084
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1087
if (gtt->userptr) {
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1089
} else if (ttm->sg && drm_gem_is_imported(gtt->gobj)) {
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1092
attach = gtt->gobj->import_attach;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1101
if (!gtt->bound)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1104
if (gtt->offset == AMDGPU_BO_INVALID_OFFSET)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1108
amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1109
gtt->bound = false;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1115
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1118
if (gtt->usertask)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1119
put_task_struct(gtt->usertask);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1122
ttm_tt_fini(&gtt->ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1123
kfree(gtt);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1139
struct amdgpu_ttm_tt *gtt;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1142
gtt = kzalloc(sizeof(struct amdgpu_ttm_tt), GFP_KERNEL);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1143
if (!gtt)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1146
gtt->gobj = &bo->base;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1148
gtt->pool_id = KFD_XCP_MEM_ID(adev, abo->xcp_id);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1150
gtt->pool_id = abo->xcp_id;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1158
if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) {
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1159
kfree(gtt);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1162
return &gtt->ttm;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1176
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1182
if (gtt->userptr) {
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1192
if (adev->mman.ttm_pools && gtt->pool_id >= 0)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1193
pool = &adev->mman.ttm_pools[gtt->pool_id];
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1217
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1225
if (gtt->userptr) {
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1244
if (adev->mman.ttm_pools && gtt->pool_id >= 0)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1245
pool = &adev->mman.ttm_pools[gtt->pool_id];
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1262
struct amdgpu_ttm_tt *gtt;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1267
gtt = (void *)tbo->ttm;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1268
*user_addr = gtt->userptr;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1287
struct amdgpu_ttm_tt *gtt;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1299
gtt = ttm_to_amdgpu_ttm_tt(bo->ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1300
gtt->userptr = addr;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1301
gtt->userflags = flags;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1304
if (gtt->usertask)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1305
put_task_struct(gtt->usertask);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1306
gtt->usertask = current->group_leader;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1307
get_task_struct(gtt->usertask);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1318
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1320
if (gtt == NULL)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1323
if (gtt->usertask == NULL)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1327
return gtt->usertask->mm;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1342
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1345
if (gtt == NULL || !gtt->userptr)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1351
size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1352
if (gtt->userptr > end || gtt->userptr + size <= start)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1356
*userptr = gtt->userptr;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1365
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1367
if (gtt == NULL || !gtt->userptr)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1378
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1380
if (gtt == NULL)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
1383
return !!(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
715
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
716
unsigned long start = gtt->userptr;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
740
if (unlikely((gtt->userflags & AMDGPU_GEM_USERPTR_ANONONLY) &&
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
764
struct amdgpu_ttm_tt *gtt = (void *)ttm;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
766
if (gtt && gtt->userptr && range)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
779
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
781
if (!gtt || !gtt->userptr || !range)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
785
gtt->userptr, ttm->num_pages);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
820
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
821
int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
839
drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
862
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
863
int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
887
struct amdgpu_ttm_tt *gtt = (void *)ttm;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
900
gtt->offset + (page_idx << PAGE_SHIFT),
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
901
1, &gtt->ttm.dma_address[page_idx], flags);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
907
gtt->offset + ((page_idx + 1) << PAGE_SHIFT),
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
909
&gtt->ttm.dma_address[page_idx + 1],
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
920
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
928
amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages,
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
929
gtt->ttm.dma_address, flags);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
931
gtt->bound = true;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
945
struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
952
if (gtt->bound)
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
955
if (gtt->userptr) {
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
966
attach = gtt->gobj->import_attach;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
979
drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
990
gtt->offset = AMDGPU_BO_INVALID_OFFSET;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
998
gtt->offset = (u64)bo_mem->start << PAGE_SHIFT;
sys/dev/pci/drm/amd/amdgpu/amdgpu_ttm.c
999
amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages,
sys/dev/pci/drm/i915/display/intel_display_types.h
111
struct i915_gtt_view gtt;
sys/dev/pci/drm/i915/display/intel_dpt.c
255
size = intel_remapped_info_size(&fb->remapped_view.gtt.remapped);
sys/dev/pci/drm/i915/display/intel_fb.c
1297
plane_state->view.gtt.type == I915_GTT_VIEW_NORMAL);
sys/dev/pci/drm/i915/display/intel_fb.c
1493
struct intel_remapped_plane_info *remap_info = &view->gtt.remapped.plane[color_plane];
sys/dev/pci/drm/i915/display/intel_fb.c
1519
if (view->gtt.type == I915_GTT_VIEW_ROTATED) {
sys/dev/pci/drm/i915/display/intel_fb.c
1521
check_array_bounds(display, view->gtt.rotated.plane, color_plane);
sys/dev/pci/drm/i915/display/intel_fb.c
1544
drm_WARN_ON(display->drm, view->gtt.type != I915_GTT_VIEW_REMAPPED);
sys/dev/pci/drm/i915/display/intel_fb.c
1546
check_array_bounds(display, view->gtt.remapped.plane, color_plane);
sys/dev/pci/drm/i915/display/intel_fb.c
1548
if (view->gtt.remapped.plane_alignment) {
sys/dev/pci/drm/i915/display/intel_fb.c
1550
view->gtt.remapped.plane_alignment);
sys/dev/pci/drm/i915/display/intel_fb.c
1643
view->gtt.type = view_type;
sys/dev/pci/drm/i915/display/intel_fb.c
1647
view->gtt.remapped.plane_alignment = SZ_2M / PAGE_SIZE;
sys/dev/pci/drm/i915/display/intel_fb_pin.c
271
vma = intel_fb_pin_to_ggtt(&fb->base, &plane_state->view.gtt,
sys/dev/pci/drm/i915/display/intel_fb_pin.c
291
vma = intel_fb_pin_to_dpt(&fb->base, &plane_state->view.gtt,
sys/dev/pci/drm/i915/display/intel_fbdev.c
299
vma = intel_fb_pin_to_ggtt(&fb->base, &fb->normal_view.gtt,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
867
unsigned int gtt;
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
873
.gtt = I915_GTT_PAGE_SIZE_64K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
878
.gtt = I915_GTT_PAGE_SIZE_4K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
883
.gtt = I915_GTT_PAGE_SIZE_4K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
888
.gtt = I915_GTT_PAGE_SIZE_64K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
893
.gtt = I915_GTT_PAGE_SIZE_4K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
898
.gtt = I915_GTT_PAGE_SIZE_64K | I915_GTT_PAGE_SIZE_4K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
903
.gtt = I915_GTT_PAGE_SIZE_64K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
908
.gtt = I915_GTT_PAGE_SIZE_64K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
915
.gtt = I915_GTT_PAGE_SIZE_4K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
920
.gtt = I915_GTT_PAGE_SIZE_4K,
sys/dev/pci/drm/i915/gem/selftests/huge_pages.c
949
unsigned int expected_gtt = objects[i].gtt;
sys/dev/pci/drm/i915/gt/intel_gtt.h
621
void i915_ggtt_suspend(struct i915_ggtt *gtt);
sys/dev/pci/drm/i915/gvt/cmd_parser.c
1825
s->vgpu->gtt.ggtt_mm : s->workload->shadow_mm;
sys/dev/pci/drm/i915/gvt/cmd_parser.c
1908
s->vgpu->gtt.ggtt_mm : s->workload->shadow_mm;
sys/dev/pci/drm/i915/gvt/cmd_parser.c
2962
ret = copy_gma_to_hva(vgpu, vgpu->gtt.ggtt_mm,
sys/dev/pci/drm/i915/gvt/cmd_parser.c
2973
ret = copy_gma_to_hva(vgpu, vgpu->gtt.ggtt_mm, gma_head, gma_tail,
sys/dev/pci/drm/i915/gvt/cmd_parser.c
3036
workload->vgpu->gtt.ggtt_mm,
sys/dev/pci/drm/i915/gvt/execlist.c
159
hwsp_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
sys/dev/pci/drm/i915/gvt/fb_decoder.c
262
plane->base_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, plane->base);
sys/dev/pci/drm/i915/gvt/fb_decoder.c
386
plane->base_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, plane->base);
sys/dev/pci/drm/i915/gvt/gtt.c
1009
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1073
const struct intel_gvt_gtt_pte_ops *ops = s->vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1089
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1145
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1176
const struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1261
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1274
vgpu->gtt.scratch_pt[spt->shadow_page.type].page_mfn)
sys/dev/pci/drm/i915/gvt/gtt.c
1342
const struct intel_gvt_gtt_pte_ops *ops = gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1394
list_move_tail(&oos_page->list, &gvt->gtt.oos_page_free_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
1414
list_move_tail(&oos_page->list, &gvt->gtt.oos_page_use_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
1440
struct intel_gvt_gtt *gtt = &gvt->gtt;
sys/dev/pci/drm/i915/gvt/gtt.c
1446
if (list_empty(&gtt->oos_page_free_list_head)) {
sys/dev/pci/drm/i915/gvt/gtt.c
1447
oos_page = container_of(gtt->oos_page_use_list_head.next,
sys/dev/pci/drm/i915/gvt/gtt.c
1456
oos_page = container_of(gtt->oos_page_free_list_head.next,
sys/dev/pci/drm/i915/gvt/gtt.c
1471
list_add_tail(&oos_page->vm_list, &spt->vgpu->gtt.oos_page_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
1494
list_for_each_safe(pos, n, &vgpu->gtt.oos_page_list_head) {
sys/dev/pci/drm/i915/gvt/gtt.c
1513
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1545
vgpu->gtt.scratch_pt[type].page_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
1552
vgpu->gtt.scratch_pt[type].page_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
1556
vgpu->gtt.scratch_pt[type].page_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
1585
&spt->vgpu->gtt.post_shadow_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
1606
list_for_each_safe(pos, n, &vgpu->gtt.post_shadow_list_head) {
sys/dev/pci/drm/i915/gvt/gtt.c
1630
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1664
ops->set_pfn(&se, vgpu->gtt.scratch_pt[type].page_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
1694
struct intel_gvt_gtt *gtt = &gvt->gtt;
sys/dev/pci/drm/i915/gvt/gtt.c
1695
const struct intel_gvt_gtt_pte_ops *ops = gtt->pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1724
struct intel_gvt_gtt *gtt = &gvt->gtt;
sys/dev/pci/drm/i915/gvt/gtt.c
1725
const struct intel_gvt_gtt_pte_ops *ops = gtt->pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
1831
list_add_tail(&mm->ppgtt_mm.list, &vgpu->gtt.ppgtt_mm_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
1833
mutex_lock(&gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1834
list_add_tail(&mm->ppgtt_mm.lru_list, &gvt->gtt.ppgtt_mm_lru_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
1835
mutex_unlock(&gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1895
mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1897
mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1942
mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1944
&mm->vgpu->gvt->gtt.ppgtt_mm_lru_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
1945
mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1956
mutex_lock(&gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1958
list_for_each_safe(pos, n, &gvt->gtt.ppgtt_mm_lru_list_head) {
sys/dev/pci/drm/i915/gvt/gtt.c
1965
mutex_unlock(&gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1969
mutex_unlock(&gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
1980
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2009
const struct intel_gvt_gtt_pte_ops *pte_ops = gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2010
const struct intel_gvt_gtt_gma_ops *gma_ops = gvt->gtt.gma_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2082
struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm;
sys/dev/pci/drm/i915/gvt/gtt.c
2134
const struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2138
if (pfn != vgpu->gvt->gtt.scratch_mfn)
sys/dev/pci/drm/i915/gvt/gtt.c
2147
struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm;
sys/dev/pci/drm/i915/gvt/gtt.c
2148
const struct intel_gvt_gtt_pte_ops *ops = gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2230
ops->set_pfn(&m, gvt->gtt.scratch_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
2234
ops->set_pfn(&m, gvt->gtt.scratch_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
2293
struct intel_vgpu_gtt *gtt = &vgpu->gtt;
sys/dev/pci/drm/i915/gvt/gtt.c
2294
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2318
gtt->scratch_pt[type].page_mfn =
sys/dev/pci/drm/i915/gvt/gtt.c
2320
gtt->scratch_pt[type].page = virt_to_page(scratch_pt);
sys/dev/pci/drm/i915/gvt/gtt.c
2322
vgpu->id, type, gtt->scratch_pt[type].page_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
2337
ops->set_pfn(&se, gtt->scratch_pt[type - 1].page_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
2360
if (vgpu->gtt.scratch_pt[i].page != NULL) {
sys/dev/pci/drm/i915/gvt/gtt.c
2361
daddr = (dma_addr_t)(vgpu->gtt.scratch_pt[i].page_mfn <<
sys/dev/pci/drm/i915/gvt/gtt.c
2364
__free_page(vgpu->gtt.scratch_pt[i].page);
sys/dev/pci/drm/i915/gvt/gtt.c
2365
vgpu->gtt.scratch_pt[i].page = NULL;
sys/dev/pci/drm/i915/gvt/gtt.c
2366
vgpu->gtt.scratch_pt[i].page_mfn = 0;
sys/dev/pci/drm/i915/gvt/gtt.c
2402
struct intel_vgpu_gtt *gtt = &vgpu->gtt;
sys/dev/pci/drm/i915/gvt/gtt.c
2404
INIT_RADIX_TREE(&gtt->spt_tree, GFP_KERNEL);
sys/dev/pci/drm/i915/gvt/gtt.c
2406
INIT_LIST_HEAD(&gtt->ppgtt_mm_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
2407
INIT_LIST_HEAD(&gtt->oos_page_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
2408
INIT_LIST_HEAD(&gtt->post_shadow_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
2410
gtt->ggtt_mm = intel_vgpu_create_ggtt_mm(vgpu);
sys/dev/pci/drm/i915/gvt/gtt.c
2411
if (IS_ERR(gtt->ggtt_mm)) {
sys/dev/pci/drm/i915/gvt/gtt.c
2413
return PTR_ERR(gtt->ggtt_mm);
sys/dev/pci/drm/i915/gvt/gtt.c
2418
INIT_LIST_HEAD(&gtt->ggtt_mm->ggtt_mm.partial_pte_list);
sys/dev/pci/drm/i915/gvt/gtt.c
2428
list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) {
sys/dev/pci/drm/i915/gvt/gtt.c
2433
if (GEM_WARN_ON(!list_empty(&vgpu->gtt.ppgtt_mm_list_head)))
sys/dev/pci/drm/i915/gvt/gtt.c
2436
if (GEM_WARN_ON(!radix_tree_empty(&vgpu->gtt.spt_tree))) {
sys/dev/pci/drm/i915/gvt/gtt.c
2447
&vgpu->gtt.ggtt_mm->ggtt_mm.partial_pte_list,
sys/dev/pci/drm/i915/gvt/gtt.c
2453
intel_vgpu_destroy_mm(vgpu->gtt.ggtt_mm);
sys/dev/pci/drm/i915/gvt/gtt.c
2454
vgpu->gtt.ggtt_mm = NULL;
sys/dev/pci/drm/i915/gvt/gtt.c
2476
struct intel_gvt_gtt *gtt = &gvt->gtt;
sys/dev/pci/drm/i915/gvt/gtt.c
2480
WARN(!list_empty(&gtt->oos_page_use_list_head),
sys/dev/pci/drm/i915/gvt/gtt.c
2483
list_for_each_safe(pos, n, &gtt->oos_page_free_list_head) {
sys/dev/pci/drm/i915/gvt/gtt.c
2493
struct intel_gvt_gtt *gtt = &gvt->gtt;
sys/dev/pci/drm/i915/gvt/gtt.c
2498
INIT_LIST_HEAD(&gtt->oos_page_free_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
2499
INIT_LIST_HEAD(&gtt->oos_page_use_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
2517
list_add_tail(&oos_page->list, &gtt->oos_page_free_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
2544
list_for_each(pos, &vgpu->gtt.ppgtt_mm_list_head) {
sys/dev/pci/drm/i915/gvt/gtt.c
2633
gvt->gtt.pte_ops = &gen8_gtt_pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2634
gvt->gtt.gma_ops = &gen8_gtt_gma_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2650
gvt->gtt.scratch_page = virt_to_page(page);
sys/dev/pci/drm/i915/gvt/gtt.c
2651
gvt->gtt.scratch_mfn = (unsigned long)(daddr >> I915_GTT_PAGE_SHIFT);
sys/dev/pci/drm/i915/gvt/gtt.c
2658
__free_page(gvt->gtt.scratch_page);
sys/dev/pci/drm/i915/gvt/gtt.c
2662
INIT_LIST_HEAD(&gvt->gtt.ppgtt_mm_lru_list_head);
sys/dev/pci/drm/i915/gvt/gtt.c
2663
rw_init(&gvt->gtt.ppgtt_mm_lock, "gvtmm");
sys/dev/pci/drm/i915/gvt/gtt.c
2678
dma_addr_t daddr = (dma_addr_t)(gvt->gtt.scratch_mfn <<
sys/dev/pci/drm/i915/gvt/gtt.c
2683
__free_page(gvt->gtt.scratch_page);
sys/dev/pci/drm/i915/gvt/gtt.c
2701
list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) {
sys/dev/pci/drm/i915/gvt/gtt.c
2704
mutex_lock(&vgpu->gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
2706
mutex_unlock(&vgpu->gvt->gtt.ppgtt_mm_lock);
sys/dev/pci/drm/i915/gvt/gtt.c
2725
const struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
2731
pte_ops->set_pfn(&entry, gvt->gtt.scratch_mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
2738
ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index);
sys/dev/pci/drm/i915/gvt/gtt.c
2741
ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++);
sys/dev/pci/drm/i915/gvt/gtt.c
2748
ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index);
sys/dev/pci/drm/i915/gvt/gtt.c
2751
ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++);
sys/dev/pci/drm/i915/gvt/gtt.c
2775
mm = vgpu->gtt.ggtt_mm;
sys/dev/pci/drm/i915/gvt/gtt.c
490
const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
517
const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
533
const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
545
const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
556
const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
566
const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
592
const struct intel_gvt_gtt_pte_ops *ops = gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
621
const struct intel_gvt_gtt_pte_ops *ops = gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
684
radix_tree_delete(&spt->vgpu->gtt.spt_tree, spt->shadow_page.mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
705
radix_tree_for_each_slot(slot, &vgpu->gtt.spt_tree, &iter, 0) {
sys/dev/pci/drm/i915/gvt/gtt.c
753
return radix_tree_lookup(&vgpu->gtt.spt_tree, mfn);
sys/dev/pci/drm/i915/gvt/gtt.c
795
ret = radix_tree_insert(&vgpu->gtt.spt_tree, spt->shadow_page.mfn, spt);
sys/dev/pci/drm/i915/gvt/gtt.c
849
spt->vgpu->gvt->gtt.pte_ops->test_present(e))
sys/dev/pci/drm/i915/gvt/gtt.c
855
spt->vgpu->gvt->gtt.pte_ops->test_present(e))
sys/dev/pci/drm/i915/gvt/gtt.c
884
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
905
vgpu->gtt.scratch_pt[cur_pt_type].page_mfn)
sys/dev/pci/drm/i915/gvt/gtt.c
921
const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
sys/dev/pci/drm/i915/gvt/gtt.c
929
if (!pfn || pfn == vgpu->gtt.scratch_pt[type].page_mfn)
sys/dev/pci/drm/i915/gvt/gvt.h
204
struct intel_vgpu_gtt gtt;
sys/dev/pci/drm/i915/gvt/gvt.h
338
struct intel_gvt_gtt gtt;
sys/dev/pci/drm/i915/gvt/mmio.c
89
pt = vgpu->gtt.ggtt_mm->ggtt_mm.virtual_ggtt + offset;
sys/dev/pci/drm/i915/gvt/scheduler.c
1641
ring_context_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
sys/dev/pci/drm/i915/gvt/scheduler.c
226
context_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
sys/dev/pci/drm/i915/gvt/scheduler.c
989
context_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
sys/dev/pci/drm/include/uapi/drm/amdgpu_drm.h
1399
struct drm_amdgpu_heap_info gtt;
sys/dev/pci/drm/radeon/r100.c
712
u32 *gtt = rdev->gart.ptr;
sys/dev/pci/drm/radeon/r100.c
713
gtt[i] = cpu_to_le32(lower_32_bits(entry));
sys/dev/pci/drm/radeon/radeon_ttm.c
337
struct radeon_ttm_tt *gtt = (void *)ttm;
sys/dev/pci/drm/radeon/radeon_ttm.c
341
int write = !(gtt->userflags & RADEON_GEM_USERPTR_READONLY);
sys/dev/pci/drm/radeon/radeon_ttm.c
345
if (current->mm != gtt->usermm)
sys/dev/pci/drm/radeon/radeon_ttm.c
348
if (gtt->userflags & RADEON_GEM_USERPTR_ANONONLY) {
sys/dev/pci/drm/radeon/radeon_ttm.c
351
unsigned long end = gtt->userptr + (u64)ttm->num_pages * PAGE_SIZE;
sys/dev/pci/drm/radeon/radeon_ttm.c
353
vma = find_vma(gtt->usermm, gtt->userptr);
sys/dev/pci/drm/radeon/radeon_ttm.c
360
uint64_t userptr = gtt->userptr + pinned * PAGE_SIZE;
sys/dev/pci/drm/radeon/radeon_ttm.c
382
drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
sys/dev/pci/drm/radeon/radeon_ttm.c
401
struct radeon_ttm_tt *gtt = (void *)ttm;
sys/dev/pci/drm/radeon/radeon_ttm.c
404
int write = !(gtt->userflags & RADEON_GEM_USERPTR_READONLY);
sys/dev/pci/drm/radeon/radeon_ttm.c
417
if (!(gtt->userflags & RADEON_GEM_USERPTR_READONLY))
sys/dev/pci/drm/radeon/radeon_ttm.c
430
struct radeon_ttm_tt *gtt = (void*)ttm;
sys/dev/pci/drm/radeon/radeon_ttm.c
432
return (gtt->bound);
sys/dev/pci/drm/radeon/radeon_ttm.c
439
struct radeon_ttm_tt *gtt = (void*)ttm;
sys/dev/pci/drm/radeon/radeon_ttm.c
445
if (gtt->bound)
sys/dev/pci/drm/radeon/radeon_ttm.c
448
if (gtt->userptr) {
sys/dev/pci/drm/radeon/radeon_ttm.c
453
gtt->offset = (unsigned long)(bo_mem->start << PAGE_SHIFT);
sys/dev/pci/drm/radeon/radeon_ttm.c
460
r = radeon_gart_bind(rdev, gtt->offset, ttm->num_pages,
sys/dev/pci/drm/radeon/radeon_ttm.c
461
ttm->pages, gtt->ttm.dma_address, flags);
sys/dev/pci/drm/radeon/radeon_ttm.c
464
ttm->num_pages, (unsigned)gtt->offset);
sys/dev/pci/drm/radeon/radeon_ttm.c
467
gtt->bound = true;
sys/dev/pci/drm/radeon/radeon_ttm.c
473
struct radeon_ttm_tt *gtt = (void *)ttm;
sys/dev/pci/drm/radeon/radeon_ttm.c
476
if (gtt->userptr)
sys/dev/pci/drm/radeon/radeon_ttm.c
479
if (!gtt->bound)
sys/dev/pci/drm/radeon/radeon_ttm.c
482
radeon_gart_unbind(rdev, gtt->offset, ttm->num_pages);
sys/dev/pci/drm/radeon/radeon_ttm.c
484
gtt->bound = false;
sys/dev/pci/drm/radeon/radeon_ttm.c
489
struct radeon_ttm_tt *gtt = (void *)ttm;
sys/dev/pci/drm/radeon/radeon_ttm.c
491
ttm_tt_fini(&gtt->ttm);
sys/dev/pci/drm/radeon/radeon_ttm.c
492
kfree(gtt);
sys/dev/pci/drm/radeon/radeon_ttm.c
498
struct radeon_ttm_tt *gtt;
sys/dev/pci/drm/radeon/radeon_ttm.c
510
gtt = kzalloc(sizeof(struct radeon_ttm_tt), GFP_KERNEL);
sys/dev/pci/drm/radeon/radeon_ttm.c
511
if (gtt == NULL) {
sys/dev/pci/drm/radeon/radeon_ttm.c
522
if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) {
sys/dev/pci/drm/radeon/radeon_ttm.c
523
kfree(gtt);
sys/dev/pci/drm/radeon/radeon_ttm.c
526
return &gtt->ttm;
sys/dev/pci/drm/radeon/radeon_ttm.c
547
struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
sys/dev/pci/drm/radeon/radeon_ttm.c
550
if (gtt && gtt->userptr) {
sys/dev/pci/drm/radeon/radeon_ttm.c
560
drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
sys/dev/pci/drm/radeon/radeon_ttm.c
571
struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
sys/dev/pci/drm/radeon/radeon_ttm.c
576
if (gtt && gtt->userptr) {
sys/dev/pci/drm/radeon/radeon_ttm.c
595
struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
sys/dev/pci/drm/radeon/radeon_ttm.c
597
if (gtt == NULL)
sys/dev/pci/drm/radeon/radeon_ttm.c
600
gtt->userptr = addr;
sys/dev/pci/drm/radeon/radeon_ttm.c
601
gtt->usermm = current->mm;
sys/dev/pci/drm/radeon/radeon_ttm.c
602
gtt->userflags = flags;
sys/dev/pci/drm/radeon/radeon_ttm.c
667
struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
sys/dev/pci/drm/radeon/radeon_ttm.c
669
if (gtt == NULL)
sys/dev/pci/drm/radeon/radeon_ttm.c
672
return !!gtt->userptr;
sys/dev/pci/drm/radeon/radeon_ttm.c
678
struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
sys/dev/pci/drm/radeon/radeon_ttm.c
680
if (gtt == NULL)
sys/dev/pci/drm/radeon/radeon_ttm.c
683
return !!(gtt->userflags & RADEON_GEM_USERPTR_READONLY);
sys/dev/pci/drm/radeon/rs400.c
237
u32 *gtt = rdev->gart.ptr;
sys/dev/pci/drm/radeon/rs400.c
238
gtt[i] = cpu_to_le32(lower_32_bits(entry));
usr.bin/awk/awk.h
271
gtt *gototab;
usr.bin/awk/b.c
150
gtt *p;
usr.bin/awk/b.c
160
p = (gtt *) reallocarray(f->gototab, new_count, sizeof(gtt));
usr.bin/awk/b.c
666
gtt *tab = & f->gototab[state];
usr.bin/awk/b.c
693
gtt *tab = & f->gototab[state];