arch/x86/kvm/svm/sev.c
1047
void __user *measure = u64_to_user_ptr(argp->data);
arch/x86/kvm/svm/sev.c
1066
p = u64_to_user_ptr(params.uaddr);
arch/x86/kvm/svm/sev.c
1137
if (copy_to_user(u64_to_user_ptr(argp->data), &params, sizeof(params)))
arch/x86/kvm/svm/sev.c
1301
if (copy_from_user(&debug, u64_to_user_ptr(argp->data), sizeof(debug)))
arch/x86/kvm/svm/sev.c
1384
if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))
arch/x86/kvm/svm/sev.c
1448
void __user *report = u64_to_user_ptr(argp->data);
arch/x86/kvm/svm/sev.c
1458
if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))
arch/x86/kvm/svm/sev.c
1467
p = u64_to_user_ptr(params.uaddr);
arch/x86/kvm/svm/sev.c
1519
if (copy_to_user(u64_to_user_ptr(argp->data), params,
arch/x86/kvm/svm/sev.c
1537
if (copy_from_user(&params, u64_to_user_ptr(argp->data),
arch/x86/kvm/svm/sev.c
1592
if (!ret && copy_to_user(u64_to_user_ptr(params.session_uaddr),
arch/x86/kvm/svm/sev.c
1600
if (copy_to_user(u64_to_user_ptr(argp->data), &params,
arch/x86/kvm/svm/sev.c
1630
if (copy_to_user(u64_to_user_ptr(argp->data), params,
arch/x86/kvm/svm/sev.c
1649
if (copy_from_user(&params, u64_to_user_ptr(argp->data),
arch/x86/kvm/svm/sev.c
1700
if (copy_to_user(u64_to_user_ptr(params.trans_uaddr),
arch/x86/kvm/svm/sev.c
1707
if (copy_to_user(u64_to_user_ptr(params.hdr_uaddr), hdr,
arch/x86/kvm/svm/sev.c
1757
if (copy_from_user(&params, u64_to_user_ptr(argp->data),
arch/x86/kvm/svm/sev.c
1799
if (copy_to_user(u64_to_user_ptr(argp->data),
arch/x86/kvm/svm/sev.c
1829
if (copy_from_user(&params, u64_to_user_ptr(argp->data),
arch/x86/kvm/svm/sev.c
2218
if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))
arch/x86/kvm/svm/sev.c
2367
if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))
arch/x86/kvm/svm/sev.c
2381
src = params.type == KVM_SEV_SNP_PAGE_TYPE_ZERO ? NULL : u64_to_user_ptr(params.uaddr);
arch/x86/kvm/svm/sev.c
2432
if (copy_to_user(u64_to_user_ptr(argp->data), &params, sizeof(params)))
arch/x86/kvm/svm/sev.c
2504
if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))
arch/x86/kvm/svm/sev.c
566
if (copy_from_user(&data, u64_to_user_ptr(argp->data), sizeof(data)))
arch/x86/kvm/svm/sev.c
615
if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))
arch/x86/kvm/svm/sev.c
659
if (copy_to_user(u64_to_user_ptr(argp->data), &params, sizeof(params))) {
arch/x86/kvm/svm/sev.c
819
if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))
arch/x86/kvm/vmx/tdx.c
2214
user_caps = u64_to_user_ptr(cmd->data);
arch/x86/kvm/vmx/tdx.c
2708
struct kvm_tdx_init_vm __user *user_data = u64_to_user_ptr(cmd->data);
arch/x86/kvm/vmx/tdx.c
3013
output = u64_to_user_ptr(cmd->data);
arch/x86/kvm/vmx/tdx.c
3177
if (copy_from_user(&region, u64_to_user_ptr(cmd->data), sizeof(region)))
arch/x86/kvm/vmx/tdx.c
3199
u64_to_user_ptr(region.source_addr),
arch/x86/kvm/vmx/tdx.c
3218
if (copy_to_user(u64_to_user_ptr(cmd->data), &region, sizeof(region)))
arch/x86/kvm/x86.c
5021
u64 __user *uaddr = u64_to_user_ptr(attr->addr);
arch/x86/kvm/x86.c
5923
u64 __user *uaddr = u64_to_user_ptr(attr->addr);
arch/x86/kvm/x86.c
5943
u64 __user *uaddr = u64_to_user_ptr(attr->addr);
arch/x86/kvm/x86.c
6155
user_val = u64_to_user_ptr(one_reg.addr);
arch/x86/kvm/xen.c
790
void __user * hva = u64_to_user_ptr(data->u.shared_info.hva);
block/blk-crypto.c
458
if (copy_from_user(raw_key, u64_to_user_ptr(arg.raw_key_ptr),
block/blk-crypto.c
471
if (copy_to_user(u64_to_user_ptr(arg.lt_key_ptr), lt_key,
block/blk-crypto.c
506
if (copy_to_user(u64_to_user_ptr(arg.lt_key_ptr), lt_key,
block/blk-crypto.c
536
if (copy_from_user(lt_key, u64_to_user_ptr(arg.lt_key_ptr),
block/blk-crypto.c
549
if (copy_to_user(u64_to_user_ptr(arg.eph_key_ptr), eph_key,
block/ioctl.c
473
keys_ptr = u64_to_user_ptr(read_keys.keys_ptr);
drivers/accel/amdxdna/aie2_error.c
412
if (copy_to_user(u64_to_user_ptr(args->buffer),
drivers/accel/amdxdna/aie2_pci.c
1104
if (copy_from_user(&power_state, u64_to_user_ptr(args->buffer),
drivers/accel/amdxdna/aie2_pci.c
1130
if (copy_from_user(&state, u64_to_user_ptr(args->buffer), sizeof(state)))
drivers/accel/amdxdna/aie2_pci.c
647
if (copy_from_user(&status, u64_to_user_ptr(args->buffer), sizeof(status))) {
drivers/accel/amdxdna/aie2_pci.c
658
ret = aie2_query_status(ndev, u64_to_user_ptr(status.buffer),
drivers/accel/amdxdna/aie2_pci.c
665
if (copy_to_user(u64_to_user_ptr(args->buffer), &status, sizeof(status))) {
drivers/accel/amdxdna/aie2_pci.c
711
if (copy_to_user(u64_to_user_ptr(args->buffer), meta, sizeof(*meta)))
drivers/accel/amdxdna/aie2_pci.c
729
if (copy_to_user(u64_to_user_ptr(args->buffer), &version, sizeof(version)))
drivers/accel/amdxdna/aie2_pci.c
746
if (copy_to_user(u64_to_user_ptr(args->buffer), &version, sizeof(version)))
drivers/accel/amdxdna/aie2_pci.c
762
if (copy_to_user(u64_to_user_ptr(args->buffer), &mode, sizeof(mode)))
drivers/accel/amdxdna/aie2_pci.c
787
if (copy_to_user(u64_to_user_ptr(args->buffer), clock, sizeof(*clock)))
drivers/accel/amdxdna/aie2_pci.c
823
buf = u64_to_user_ptr(array_args->buffer);
drivers/accel/amdxdna/aie2_pci.c
877
if (copy_to_user(u64_to_user_ptr(args->buffer), &res_info, sizeof(res_info)))
drivers/accel/amdxdna/aie2_pci.c
924
if (copy_from_user(header, u64_to_user_ptr(args->buffer), sizeof(*header))) {
drivers/accel/amdxdna/aie2_pci.c
938
u64_to_user_ptr(args->buffer + header_sz),
drivers/accel/amdxdna/aie2_pci.c
945
if (copy_to_user(u64_to_user_ptr(args->buffer), header, header_sz)) {
drivers/accel/amdxdna/aie2_pci.c
966
if (copy_to_user(u64_to_user_ptr(args->buffer), &state, sizeof(state)))
drivers/accel/amdxdna/amdxdna_ctx.c
199
if (copy_from_user(&hwctx->qos, u64_to_user_ptr(args->qos_p), sizeof(hwctx->qos))) {
drivers/accel/amdxdna/amdxdna_ctx.c
327
if (copy_from_user(buf, u64_to_user_ptr(val), buf_size)) {
drivers/accel/amdxdna/amdxdna_ctx.c
572
ret = copy_from_user(arg_bo_hdls, u64_to_user_ptr(args->args),
drivers/accel/amdxdna/amdxdna_gem.c
551
if (copy_from_user(&va_tbl, u64_to_user_ptr(args->vaddr), sizeof(va_tbl))) {
drivers/accel/amdxdna/amdxdna_gem.c
561
u64_to_user_ptr(args->vaddr + sizeof(va_tbl)));
drivers/accel/ethosu/ethosu_drv.c
46
return copy_struct_to_user(u64_to_user_ptr(args->pointer),
drivers/accel/habanalabs/common/command_submission.c
1917
u64_to_user_ptr(chunk->signal_seq_arr),
drivers/accel/habanalabs/common/command_submission.c
3540
if (copy_from_user(&completion_value, u64_to_user_ptr(user_address), 8)) {
drivers/accel/habanalabs/common/command_submission.c
3575
if (copy_from_user(&completion_value, u64_to_user_ptr(user_address), 8)) {
drivers/accel/habanalabs/common/habanalabs_ioctl.c
219
if (copy_from_user(input, u64_to_user_ptr(args->input_ptr),
drivers/accel/ivpu/ivpu_gem_userptr.c
169
void __user *user_ptr = u64_to_user_ptr(args->user_ptr);
drivers/accel/ivpu/ivpu_ms.c
205
ret = copy_samples_to_user(vdev, ms, u64_to_user_ptr(args->buffer_ptr),
drivers/accel/ivpu/ivpu_ms.c
299
if (copy_to_user(u64_to_user_ptr(args->buffer_ptr), ivpu_bo_vaddr(bo), info_size))
drivers/accel/qaic/qaic_control.c
1332
user_data = u64_to_user_ptr(user_msg->data);
drivers/accel/qaic/qaic_data.c
1015
user_data = u64_to_user_ptr(args->data);
drivers/accel/qaic/qaic_data.c
1347
exec = memdup_array_user(u64_to_user_ptr(args->data), args->hdr.count, size);
drivers/accel/qaic/qaic_data.c
1805
ent = memdup_array_user(u64_to_user_ptr(args->data), args->hdr.count, sizeof(*ent));
drivers/accel/qaic/qaic_data.c
1845
if (copy_to_user(u64_to_user_ptr(args->data), ent, args->hdr.count * sizeof(*ent)))
drivers/accel/rocket/rocket_job.c
563
ret = drm_gem_objects_lookup(file, u64_to_user_ptr(job->in_bo_handles),
drivers/accel/rocket/rocket_job.c
570
ret = drm_gem_objects_lookup(file, u64_to_user_ptr(job->out_bo_handles),
drivers/accel/rocket/rocket_job.c
620
u64_to_user_ptr(args->jobs) + i * args->job_struct_size,
drivers/accel/rocket/rocket_job.c
84
u64_to_user_ptr(job->tasks) + i * job->task_struct_size,
drivers/block/ublk_drv.c
1373
import_ubuf(dir, u64_to_user_ptr(io->buf.addr), rq_bytes, &iter);
drivers/block/ublk_drv.c
1394
import_ubuf(dir, u64_to_user_ptr(io->buf.addr), io->res, &iter);
drivers/block/ublk_drv.c
3617
.uaddr = u64_to_user_ptr(READ_ONCE(cmd->sqe->addr)),
drivers/block/ublk_drv.c
3698
.uaddr = u64_to_user_ptr(READ_ONCE(cmd->sqe->addr)),
drivers/crypto/ccp/sev-dev.c
2494
if (copy_from_user(&input, u64_to_user_ptr(argp->data), sizeof(input)))
drivers/cxl/core/mbox.c
369
mbox_cmd->payload_in = vmemdup_user(u64_to_user_ptr(in_payload),
drivers/cxl/core/mbox.c
623
if (copy_to_user(u64_to_user_ptr(out_payload),
drivers/dma-buf/sync_file.c
345
if (copy_to_user(u64_to_user_ptr(info.sync_fence_info), fence_info,
drivers/fwctl/main.c
120
if (copy_from_user(inbuf, u64_to_user_ptr(cmd->in), cmd->in_len))
drivers/fwctl/main.c
133
if (copy_to_user(u64_to_user_ptr(cmd->out), outbuf,
drivers/fwctl/main.c
66
if (clear_user(u64_to_user_ptr(cmd->out_device_data),
drivers/fwctl/main.c
75
if (copy_to_user_zero_pad(u64_to_user_ptr(cmd->out_device_data),
drivers/fwctl/pds/main.c
370
in_payload = memdup_user(u64_to_user_ptr(rpc->in.payload), rpc->in.len);
drivers/fwctl/pds/main.c
427
if (copy_to_user(u64_to_user_ptr(rpc->out.payload), out_payload, rpc->out.len)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c
187
const void __user *uptr = u64_to_user_ptr(in->bo_info_ptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1751
fences = memdup_array_user(u64_to_user_ptr(wait->in.fences),
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
188
chunk_array = memdup_array_user(u64_to_user_ptr(cs->in.chunks),
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
205
chunk_ptr = u64_to_user_ptr(chunk_array[i]);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
217
p->chunks[i].kdata = vmemdup_array_user(u64_to_user_ptr(user_chunk.chunk_data),
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
1046
void __user *out = u64_to_user_ptr(args->value);
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
1125
if (copy_to_user(u64_to_user_ptr(args->value), vm_entries, num_mappings * sizeof(*vm_entries)))
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
1218
if (copy_to_user(u64_to_user_ptr(args->entries), bo_entries, num_bos * sizeof(*bo_entries)))
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
60
syncobj_handles = memdup_user(u64_to_user_ptr(syncobj_handles_array),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
489
syncobj_handles = memdup_user(u64_to_user_ptr(args->syncobj_handles),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
510
bo_handles_read = memdup_user(u64_to_user_ptr(args->bo_read_handles),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
533
bo_handles_write = memdup_user(u64_to_user_ptr(args->bo_write_handles),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
683
bo_handles_read = memdup_user(u64_to_user_ptr(wait_info->bo_read_handles),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
689
bo_handles_write = memdup_user(u64_to_user_ptr(wait_info->bo_write_handles),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
697
syncobj_handles = memdup_user(u64_to_user_ptr(wait_info->syncobj_handles),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
705
timeline_handles = memdup_user(u64_to_user_ptr(wait_info->syncobj_timeline_handles),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
712
timeline_points = memdup_user(u64_to_user_ptr(wait_info->syncobj_timeline_points),
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
984
if (copy_to_user(u64_to_user_ptr(wait_info->out_fences),
drivers/gpu/drm/amd/amdgpu/mes_userqueue.c
318
compute_mqd = memdup_user(u64_to_user_ptr(mqd_user->mqd), mqd_user->mqd_size);
drivers/gpu/drm/amd/amdgpu/mes_userqueue.c
356
mqd_gfx_v11 = memdup_user(u64_to_user_ptr(mqd_user->mqd), mqd_user->mqd_size);
drivers/gpu/drm/amd/amdgpu/mes_userqueue.c
391
mqd_sdma_v11 = memdup_user(u64_to_user_ptr(mqd_user->mqd), mqd_user->mqd_size);
drivers/gpu/drm/amd/ras/ras_mgr/amdgpu_virt_ras_cmd.c
228
copy_to_user(u64_to_user_ptr(req->buf_ptr), out_buf, offset)) {
drivers/gpu/drm/amd/ras/rascore/ras_cmd.c
238
copy_to_user(u64_to_user_ptr(req->buf_ptr), buffer, offset)) {
drivers/gpu/drm/drm_atomic_uapi.c
458
s32 __user *fence_ptr = u64_to_user_ptr(val);
drivers/gpu/drm/drm_atomic_uapi.c
924
s32 __user *fence_ptr = u64_to_user_ptr(val);
drivers/gpu/drm/drm_ioctl.c
576
user_ptr = u64_to_user_ptr(name->name);
drivers/gpu/drm/drm_lease.c
512
object_ids = memdup_array_user(u64_to_user_ptr(cl->object_ids),
drivers/gpu/drm/drm_mode_config.c
113
fb_id = u64_to_user_ptr(card_res->fb_id_ptr);
drivers/gpu/drm/drm_mode_config.c
131
crtc_id = u64_to_user_ptr(card_res->crtc_id_ptr);
drivers/gpu/drm/drm_mode_config.c
143
encoder_id = u64_to_user_ptr(card_res->encoder_id_ptr);
drivers/gpu/drm/drm_mode_config.c
154
connector_id = u64_to_user_ptr(card_res->connector_id_ptr);
drivers/gpu/drm/drm_plane.c
813
plane_ptr = u64_to_user_ptr(plane_resp->plane_id_ptr);
drivers/gpu/drm/drm_property.c
481
values_ptr = u64_to_user_ptr(out_resp->values_ptr);
drivers/gpu/drm/drm_property.c
492
enum_ptr = u64_to_user_ptr(out_resp->enum_blob_ptr);
drivers/gpu/drm/drm_property.c
839
if (copy_to_user(u64_to_user_ptr(out_resp->data),
drivers/gpu/drm/drm_property.c
868
u64_to_user_ptr(out_resp->data),
drivers/gpu/drm/drm_syncobj.c
1251
u64_to_user_ptr(timeline_wait->points),
drivers/gpu/drm/drm_syncobj.c
1344
u64_to_user_ptr(args->handles),
drivers/gpu/drm/drm_syncobj.c
1388
u64_to_user_ptr(args->handles),
drivers/gpu/drm/drm_syncobj.c
1529
u64_to_user_ptr(args->handles),
drivers/gpu/drm/drm_syncobj.c
1562
u64_to_user_ptr(args->handles),
drivers/gpu/drm/drm_syncobj.c
1600
u64_to_user_ptr(args->handles),
drivers/gpu/drm/drm_syncobj.c
1612
if (!u64_to_user_ptr(args->points)) {
drivers/gpu/drm/drm_syncobj.c
1614
} else if (copy_from_user(points, u64_to_user_ptr(args->points),
drivers/gpu/drm/drm_syncobj.c
1657
uint64_t __user *points = u64_to_user_ptr(args->points);
drivers/gpu/drm/drm_syncobj.c
1671
u64_to_user_ptr(args->handles),
drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c
479
ret = copy_from_user(bos, u64_to_user_ptr(args->bos),
drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c
486
ret = copy_from_user(relocs, u64_to_user_ptr(args->relocs),
drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c
493
ret = copy_from_user(pmrs, u64_to_user_ptr(args->pmrs),
drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c
500
ret = copy_from_user(stream, u64_to_user_ptr(args->stream),
drivers/gpu/drm/exynos/exynos_drm_vidi.c
266
const void __user *edid_userptr = u64_to_user_ptr(vidi->edid);
drivers/gpu/drm/i915/gem/i915_gem_context.c
1827
err = i915_user_extensions(u64_to_user_ptr(args->extensions),
drivers/gpu/drm/i915/gem/i915_gem_context.c
2021
if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value),
drivers/gpu/drm/i915/gem/i915_gem_context.c
2126
if (copy_from_user(&user, u64_to_user_ptr(args->value), sizeof(user)))
drivers/gpu/drm/i915/gem/i915_gem_context.c
2160
state = memdup_user(u64_to_user_ptr(user.image), ce->engine->context_size);
drivers/gpu/drm/i915/gem/i915_gem_context.c
2397
ret = i915_user_extensions(u64_to_user_ptr(args->extensions),
drivers/gpu/drm/i915/gem/i915_gem_context.c
2485
if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value),
drivers/gpu/drm/i915/gem/i915_gem_context.c
2517
if (copy_to_user(u64_to_user_ptr(args->value), &user_sseu,
drivers/gpu/drm/i915/gem/i915_gem_context.c
740
u64_to_user_ptr(args->value);
drivers/gpu/drm/i915/gem/i915_gem_context.c
798
err = i915_user_extensions(u64_to_user_ptr(extensions),
drivers/gpu/drm/i915/gem/i915_gem_context.c
828
if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value),
drivers/gpu/drm/i915/gem/i915_gem_create.c
277
u64_to_user_ptr(args->regions);
drivers/gpu/drm/i915/gem/i915_gem_create.c
454
ret = i915_user_extensions(u64_to_user_ptr(args->extensions),
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
1519
u64_to_user_ptr(entry->relocs_ptr);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
1633
addr = u64_to_user_ptr(entry->relocs_ptr);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
1667
urelocs = u64_to_user_ptr(eb->exec[i].relocs_ptr);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2808
user_fences = u64_to_user_ptr(timeline_fences->handles_ptr);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2812
user_values = u64_to_user_ptr(timeline_fences->values_ptr);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2941
user = u64_to_user_ptr(args->cliprects_ptr);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3155
return i915_user_extensions(u64_to_user_ptr(args->cliprects_ptr),
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3595
u64_to_user_ptr(args->buffers_ptr),
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3612
u64_to_user_ptr(args->buffers_ptr);
drivers/gpu/drm/i915/gem/i915_gem_mman.c
882
err = i915_user_extensions(u64_to_user_ptr(args->extensions),
drivers/gpu/drm/i915/gem/i915_gem_phys.c
143
char __user *user_data = u64_to_user_ptr(args->data_ptr);
drivers/gpu/drm/i915/gem/i915_gem_phys.c
174
char __user *user_data = u64_to_user_ptr(args->data_ptr);
drivers/gpu/drm/i915/gem/i915_gem_shmem.c
411
char __user *user_data = u64_to_user_ptr(arg->data_ptr);
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c
1107
u32 __user *ux = u64_to_user_ptr((u64)(addr + i * sizeof(*ux)));
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c
1572
ux = u64_to_user_ptr((u64)addr);
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c
1724
addr = u64_to_user_ptr(start);
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c
936
u32 __user *ux = u64_to_user_ptr((u64)(addr + i * sizeof(*ux)));
drivers/gpu/drm/i915/i915_gem.c
252
user_data = u64_to_user_ptr(args->data_ptr);
drivers/gpu/drm/i915/i915_gem.c
404
user_data = u64_to_user_ptr(args->data_ptr);
drivers/gpu/drm/i915/i915_gem.c
473
if (!access_ok(u64_to_user_ptr(args->data_ptr),
drivers/gpu/drm/i915/i915_gem.c
584
user_data = u64_to_user_ptr(args->data_ptr);
drivers/gpu/drm/i915/i915_gem.c
699
user_data = u64_to_user_ptr(args->data_ptr);
drivers/gpu/drm/i915/i915_gem.c
755
if (!access_ok(u64_to_user_ptr(args->data_ptr), args->size))
drivers/gpu/drm/i915/i915_perf.c
4102
u64_to_user_ptr(value),
drivers/gpu/drm/i915/i915_perf.c
4240
u64_to_user_ptr(param->properties_ptr),
drivers/gpu/drm/i915/i915_perf.c
4642
u64_to_user_ptr(args->mux_regs_ptr),
drivers/gpu/drm/i915/i915_perf.c
4656
u64_to_user_ptr(args->boolean_regs_ptr),
drivers/gpu/drm/i915/i915_perf.c
4676
u64_to_user_ptr(args->flex_regs_ptr),
drivers/gpu/drm/i915/i915_query.c
134
u64_to_user_ptr(query_item->data_ptr);
drivers/gpu/drm/i915/i915_query.c
202
u32 __user *p = u64_to_user_ptr(user_regs_ptr);
drivers/gpu/drm/i915/i915_query.c
232
u64_to_user_ptr(query_item->data_ptr);
drivers/gpu/drm/i915/i915_query.c
234
u64_to_user_ptr(query_item->data_ptr +
drivers/gpu/drm/i915/i915_query.c
27
if (copy_from_user(query_hdr, u64_to_user_ptr(query_item->data_ptr),
drivers/gpu/drm/i915/i915_query.c
378
u64_to_user_ptr(query_item->data_ptr);
drivers/gpu/drm/i915/i915_query.c
466
u64_to_user_ptr(query_item->data_ptr);
drivers/gpu/drm/i915/i915_query.c
549
if (copy_to_user(u64_to_user_ptr(query_item->data_ptr),
drivers/gpu/drm/i915/i915_query.c
561
u64_to_user_ptr(query->data_ptr);
drivers/gpu/drm/i915/i915_query.c
604
u64_to_user_ptr(args->items_ptr);
drivers/gpu/drm/i915/i915_query.c
70
if (copy_to_user(u64_to_user_ptr(query_item->data_ptr),
drivers/gpu/drm/i915/i915_query.c
74
if (copy_to_user(u64_to_user_ptr(query_item->data_ptr + sizeof(topo)),
drivers/gpu/drm/i915/i915_query.c
78
if (intel_sseu_copy_ssmask_to_user(u64_to_user_ptr(query_item->data_ptr +
drivers/gpu/drm/i915/i915_query.c
83
if (intel_sseu_copy_eumask_to_user(u64_to_user_ptr(query_item->data_ptr +
drivers/gpu/drm/i915/i915_user_extensions.c
57
ext = u64_to_user_ptr(next);
drivers/gpu/drm/imagination/pvr_context.c
76
stream = memdup_user(u64_to_user_ptr(stream_user_ptr), stream_size);
drivers/gpu/drm/imagination/pvr_drv.c
1169
return copy_struct_from_user(out, obj_size, u64_to_user_ptr(usr_ptr), usr_stride);
drivers/gpu/drm/imagination/pvr_drv.c
1178
if (copy_to_user(u64_to_user_ptr(usr_ptr), in, min_t(u32, usr_stride, obj_size)))
drivers/gpu/drm/imagination/pvr_drv.c
1182
clear_user(u64_to_user_ptr(usr_ptr + obj_size), usr_stride - obj_size)) {
drivers/gpu/drm/imagination/pvr_drv.c
1206
if (copy_from_user(out_alloc, u64_to_user_ptr(in->array),
drivers/gpu/drm/imagination/pvr_drv.c
1210
void __user *in_ptr = u64_to_user_ptr(in->array);
drivers/gpu/drm/imagination/pvr_drv.c
1243
if (copy_to_user(u64_to_user_ptr(out->array), in,
drivers/gpu/drm/imagination/pvr_drv.c
1248
void __user *out_ptr = u64_to_user_ptr(out->array);
drivers/gpu/drm/imagination/pvr_drv.c
1260
clear_user(u64_to_user_ptr(out->array + obj_size),
drivers/gpu/drm/imagination/pvr_drv.c
504
if (copy_to_user(u64_to_user_ptr(query.quirks), out,
drivers/gpu/drm/imagination/pvr_drv.c
587
if (copy_to_user(u64_to_user_ptr(query.enhancements), out,
drivers/gpu/drm/imagination/pvr_job.c
93
stream = memdup_user(u64_to_user_ptr(stream_userptr), stream_len);
drivers/gpu/drm/lima/lima_drv.c
130
if (copy_from_user(bos, u64_to_user_ptr(args->bos), size)) {
drivers/gpu/drm/lima/lima_drv.c
142
if (copy_from_user(task->frame, u64_to_user_ptr(args->frame), args->frame_size)) {
drivers/gpu/drm/msm/adreno/adreno_gpu.c
475
str = memdup_user_nul(u64_to_user_ptr(value), len);
drivers/gpu/drm/msm/msm_drv.c
616
if (copy_from_user(msm_obj->name, u64_to_user_ptr(args->value),
drivers/gpu/drm/msm/msm_drv.c
637
if (copy_to_user(u64_to_user_ptr(args->value),
drivers/gpu/drm/msm/msm_drv.c
644
obj, u64_to_user_ptr(args->value), args->len);
drivers/gpu/drm/msm/msm_drv.c
648
obj, u64_to_user_ptr(args->value), &args->len);
drivers/gpu/drm/msm/msm_gem_submit.c
139
u64_to_user_ptr(args->bos + (i * sizeof(submit_bo)));
drivers/gpu/drm/msm/msm_gem_submit.c
205
u64_to_user_ptr(args->cmds + (i * sizeof(submit_cmd)));
drivers/gpu/drm/msm/msm_gem_submit.c
249
userptr = u64_to_user_ptr(submit_cmd.relocs);
drivers/gpu/drm/msm/msm_gem_vma.c
1061
u64_to_user_ptr(args->ops + (i * sizeof(op)));
drivers/gpu/drm/msm/msm_submitqueue.c
297
ret = copy_to_user(u64_to_user_ptr(args->data), &queue->faults, size);
drivers/gpu/drm/msm/msm_syncobj.c
106
u64_to_user_ptr(address),
drivers/gpu/drm/msm/msm_syncobj.c
31
u64_to_user_ptr(address),
drivers/gpu/drm/nouveau/nouveau_drv.h
196
void __user *userptr = u64_to_user_ptr(user);
drivers/gpu/drm/nouveau/nouveau_gem.c
943
u64_to_user_ptr(req->buffers);
drivers/gpu/drm/panfrost/panfrost_drv.c
540
label = strndup_user(u64_to_user_ptr(args->label),
drivers/gpu/drm/panthor/panthor_drv.c
112
if (copy_from_user(out_alloc, u64_to_user_ptr(in->array),
drivers/gpu/drm/panthor/panthor_drv.c
116
void __user *in_ptr = u64_to_user_ptr(in->array);
drivers/gpu/drm/panthor/panthor_drv.c
64
if (copy_to_user(u64_to_user_ptr(usr_ptr), in, min_t(u32, usr_size, kern_size)))
drivers/gpu/drm/panthor/panthor_drv.c
71
clear_user(u64_to_user_ptr(usr_ptr + kern_size), usr_size - kern_size)) {
drivers/gpu/drm/qxl/qxl_ioctl.c
167
if (!access_ok(u64_to_user_ptr(cmd->command),
drivers/gpu/drm/qxl/qxl_ioctl.c
188
u64_to_user_ptr(cmd->command), cmd->command_size);
drivers/gpu/drm/qxl/qxl_ioctl.c
206
struct drm_qxl_reloc __user *u = u64_to_user_ptr(cmd->relocs);
drivers/gpu/drm/qxl/qxl_ioctl.c
282
u64_to_user_ptr(execbuffer->commands);
drivers/gpu/drm/tegra/drm.c
186
user_cmdbufs = u64_to_user_ptr(args->cmdbufs);
drivers/gpu/drm/tegra/drm.c
187
user_relocs = u64_to_user_ptr(args->relocs);
drivers/gpu/drm/tegra/drm.c
188
user_syncpt = u64_to_user_ptr(args->syncpts);
drivers/gpu/drm/tegra/submit.c
214
if (copy_from_user(bo->gather_data, u64_to_user_ptr(args->gather_data_ptr), copy_len)) {
drivers/gpu/drm/tegra/submit.c
266
bufs = alloc_copy_user_array(u64_to_user_ptr(args->bufs_ptr), args->num_bufs,
drivers/gpu/drm/tegra/submit.c
405
cmds = alloc_copy_user_array(u64_to_user_ptr(args->cmds_ptr), args->num_cmds,
drivers/gpu/drm/v3d/v3d_perfmon.c
429
if (copy_to_user(u64_to_user_ptr(req->values_ptr), perfmon->values,
drivers/gpu/drm/v3d/v3d_submit.c
179
struct drm_v3d_sem __user *handle = u64_to_user_ptr(se->in_syncs);
drivers/gpu/drm/v3d/v3d_submit.c
342
post_deps = u64_to_user_ptr(handles);
drivers/gpu/drm/v3d/v3d_submit.c
492
offsets = u64_to_user_ptr(timestamp.offsets);
drivers/gpu/drm/v3d/v3d_submit.c
493
syncs = u64_to_user_ptr(timestamp.syncs);
drivers/gpu/drm/v3d/v3d_submit.c
550
syncs = u64_to_user_ptr(reset.syncs);
drivers/gpu/drm/v3d/v3d_submit.c
606
offsets = u64_to_user_ptr(copy.offsets);
drivers/gpu/drm/v3d/v3d_submit.c
607
syncs = u64_to_user_ptr(copy.syncs);
drivers/gpu/drm/v3d/v3d_submit.c
682
ids_pointer = u64_to_user_ptr(ids);
drivers/gpu/drm/v3d/v3d_submit.c
734
u64_to_user_ptr(reset.syncs),
drivers/gpu/drm/v3d/v3d_submit.c
735
u64_to_user_ptr(reset.kperfmon_ids),
drivers/gpu/drm/v3d/v3d_submit.c
774
u64_to_user_ptr(copy.syncs),
drivers/gpu/drm/v3d/v3d_submit.c
775
u64_to_user_ptr(copy.kperfmon_ids),
drivers/gpu/drm/v3d/v3d_submit.c
807
user_ext = u64_to_user_ptr(ext_handles);
drivers/gpu/drm/v3d/v3d_submit.c
846
user_ext = u64_to_user_ptr(ext.next);
drivers/gpu/drm/vc4/vc4_bo.c
1079
name = strndup_user(u64_to_user_ptr(args->name), args->len + 1);
drivers/gpu/drm/vc4/vc4_gem.c
135
if (copy_to_user(u64_to_user_ptr(get_state->bo),
drivers/gpu/drm/vc4/vc4_gem.c
696
ret = drm_gem_objects_lookup(file_priv, u64_to_user_ptr(args->bo_handles),
drivers/gpu/drm/vc4/vc4_gem.c
781
u64_to_user_ptr(args->bin_cl),
drivers/gpu/drm/vc4/vc4_gem.c
788
u64_to_user_ptr(args->shader_rec),
drivers/gpu/drm/vc4/vc4_gem.c
795
u64_to_user_ptr(args->uniforms),
drivers/gpu/drm/vc4/vc4_perfmon.c
249
if (copy_to_user(u64_to_user_ptr(req->values_ptr), perfmon->counters,
drivers/gpu/drm/virtio/virtgpu_ioctl.c
123
if (copy_to_user(u64_to_user_ptr(param->value), &value, sizeof(int)))
drivers/gpu/drm/virtio/virtgpu_ioctl.c
433
if (copy_to_user(u64_to_user_ptr(args->addr), ptr, size))
drivers/gpu/drm/virtio/virtgpu_ioctl.c
520
buf = memdup_user(u64_to_user_ptr(rc_blob->cmd),
drivers/gpu/drm/virtio/virtgpu_ioctl.c
596
ctx_set_params = memdup_user(u64_to_user_ptr(args->ctx_set_params),
drivers/gpu/drm/virtio/virtgpu_ioctl.c
662
u64_to_user_ptr(value),
drivers/gpu/drm/virtio/virtgpu_submit.c
118
u64_to_user_ptr(address),
drivers/gpu/drm/virtio/virtgpu_submit.c
208
u64_to_user_ptr(address),
drivers/gpu/drm/virtio/virtgpu_submit.c
311
if (copy_from_user(bo_handles, u64_to_user_ptr(exbuf->bo_handles),
drivers/gpu/drm/virtio/virtgpu_submit.c
416
submit->buf = vmemdup_user(u64_to_user_ptr(exbuf->command), exbuf->size);
drivers/gpu/drm/vmwgfx/vmwgfx_msg.c
1008
desc_len = strncpy_from_user(pdesc->description, u64_to_user_ptr(arg->description),
drivers/gpu/drm/xe/xe_bo.c
3090
u64 __user *address = u64_to_user_ptr(extension);
drivers/gpu/drm/xe/xe_bo.c
3124
u64 __user *address = u64_to_user_ptr(extensions);
drivers/gpu/drm/xe/xe_eu_stall.c
308
u64 __user *address = u64_to_user_ptr(extension);
drivers/gpu/drm/xe/xe_eu_stall.c
335
u64 __user *address = u64_to_user_ptr(extension);
drivers/gpu/drm/xe/xe_exec.c
116
struct drm_xe_sync __user *syncs_user = u64_to_user_ptr(args->syncs);
drivers/gpu/drm/xe/xe_exec.c
117
u64 __user *addresses_user = u64_to_user_ptr(args->address);
drivers/gpu/drm/xe/xe_exec_queue.c
1094
u64_to_user_ptr(args->instances);
drivers/gpu/drm/xe/xe_exec_queue.c
665
u64 __user *address = u64_to_user_ptr(value);
drivers/gpu/drm/xe/xe_exec_queue.c
927
u64 __user *address = u64_to_user_ptr(extension);
drivers/gpu/drm/xe/xe_exec_queue.c
971
u64 __user *address = u64_to_user_ptr(extensions);
drivers/gpu/drm/xe/xe_oa.c
1267
param->syncs_user = u64_to_user_ptr(value);
drivers/gpu/drm/xe/xe_oa.c
1336
u64 __user *address = u64_to_user_ptr(extension);
drivers/gpu/drm/xe/xe_oa.c
1370
u64 __user *address = u64_to_user_ptr(extension);
drivers/gpu/drm/xe/xe_oa.c
2358
err = copy_from_user(&param, u64_to_user_ptr(data), sizeof(param));
drivers/gpu/drm/xe/xe_oa.c
2385
u64_to_user_ptr(arg->regs_ptr),
drivers/gpu/drm/xe/xe_oa.c
2447
u64 arg, *ptr = u64_to_user_ptr(data);
drivers/gpu/drm/xe/xe_query.c
136
query_ptr = u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
188
u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
250
u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
318
u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
367
u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
439
void __user *query_ptr = u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
505
void __user *query_ptr = u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
564
struct drm_xe_query_uc_fw_version __user *query_ptr = u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
654
void __user *query_ptr = u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
716
struct drm_xe_query_pxp_status __user *query_ptr = u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_query.c
744
void __user *query_ptr = u64_to_user_ptr(query->data);
drivers/gpu/drm/xe/xe_sync.c
56
u64 __user *ptr = u64_to_user_ptr(addr);
drivers/gpu/drm/xe/xe_vm.c
2068
u64 __user *attrs_user = u64_to_user_ptr(args->vector_of_mem_attr);
drivers/gpu/drm/xe/xe_vm.c
2089
attrs_user = u64_to_user_ptr(args->vector_of_mem_attr);
drivers/gpu/drm/xe/xe_vm.c
3388
u64_to_user_ptr(args->vector_of_binds);
drivers/gpu/drm/xe/xe_vm.c
3729
syncs_user = u64_to_user_ptr(args->syncs);
drivers/gpu/drm/xe/xe_wait_user_fence.c
23
err = copy_from_user(&rvalue, u64_to_user_ptr(addr), sizeof(rvalue));
drivers/infiniband/core/ucma.c
1073
response = u64_to_user_ptr(cmd.response);
drivers/infiniband/core/ucma.c
1313
if (copy_to_user(u64_to_user_ptr(cmd.response),
drivers/infiniband/core/ucma.c
1465
optval = memdup_user(u64_to_user_ptr(cmd.optval),
drivers/infiniband/core/ucma.c
1561
if (copy_to_user(u64_to_user_ptr(cmd->response),
drivers/infiniband/core/ucma.c
1671
if (copy_to_user(u64_to_user_ptr(cmd.response),
drivers/infiniband/core/ucma.c
1738
if (copy_to_user(u64_to_user_ptr(cmd.response),
drivers/infiniband/core/ucma.c
408
if (copy_to_user(u64_to_user_ptr(cmd.response),
drivers/infiniband/core/ucma.c
478
if (copy_to_user(u64_to_user_ptr(cmd.response),
drivers/infiniband/core/ucma.c
624
if (copy_to_user(u64_to_user_ptr(cmd.response),
drivers/infiniband/core/ucma.c
890
if (copy_to_user(u64_to_user_ptr(cmd.response), &resp,
drivers/infiniband/core/uverbs_ioctl.c
144
return ib_is_buffer_cleared(u64_to_user_ptr(uattr->data) + len,
drivers/infiniband/core/uverbs_ioctl.c
206
ret = copy_from_user(idr_vals, u64_to_user_ptr(uattr->data),
drivers/infiniband/core/uverbs_ioctl.c
304
if (copy_from_user(p, u64_to_user_ptr(uattr->data),
drivers/infiniband/core/uverbs_ioctl.c
728
udata->inbuf = u64_to_user_ptr(in->ptr_attr.data);
drivers/infiniband/core/uverbs_ioctl.c
735
udata->outbuf = u64_to_user_ptr(out->ptr_attr.data);
drivers/infiniband/core/uverbs_ioctl.c
753
if (copy_to_user(u64_to_user_ptr(attr->ptr_attr.data), from, min_size))
drivers/infiniband/core/uverbs_ioctl.c
831
if (clear_user(u64_to_user_ptr(attr->ptr_attr.data) + size,
drivers/infiniband/core/uverbs_main.c
535
if (!access_ok(u64_to_user_ptr(ex_hdr->response),
drivers/infiniband/core/uverbs_main.c
652
u64_to_user_ptr(response +
drivers/infiniband/core/uverbs_main.c
663
&bundle.ucore, buf, u64_to_user_ptr(response),
drivers/infiniband/core/uverbs_main.c
669
u64_to_user_ptr(ex_hdr.response),
drivers/infiniband/core/uverbs_main.c
674
u64_to_user_ptr(ex_hdr.response) + bundle.ucore.outlen,
drivers/infiniband/core/uverbs_std_types_device.c
327
user_entries = u64_to_user_ptr(attr->ptr_attr.data);
drivers/infiniband/hw/hfi1/user_exp_rcv.c
442
if (copy_to_user(u64_to_user_ptr(tinfo->tidlist),
drivers/infiniband/hw/hfi1/user_exp_rcv.c
491
tidinfo = memdup_array_user(u64_to_user_ptr(tinfo->tidlist),
drivers/infiniband/hw/mlx5/counters.c
1080
u64_to_user_ptr(cntrs_data->counters_data),
drivers/infiniband/sw/rxe/rxe_srq.c
165
mi = u64_to_user_ptr(ucmd->mmap_info_addr);
drivers/infiniband/sw/siw/siw_qp_tx.c
70
if (copy_from_user(paddr, u64_to_user_ptr(sge->laddr),
drivers/iommu/iommufd/device.c
1556
void __user *user_ptr = u64_to_user_ptr(cmd->data_uptr);
drivers/iommu/iommufd/hw_pagetable.c
339
.uptr = u64_to_user_ptr(cmd->data_uptr),
drivers/iommu/iommufd/hw_pagetable.c
497
.uptr = u64_to_user_ptr(cmd->data_uptr),
drivers/iommu/iommufd/io_pagetable.c
582
u64_to_user_ptr(bitmap->data));
drivers/iommu/iommufd/ioas.c
164
u64_to_user_ptr(cmd->allowed_iovas),
drivers/iommu/iommufd/ioas.c
271
u64_to_user_ptr(cmd->user_va), cmd->length,
drivers/iommu/iommufd/ioas.c
84
ranges = u64_to_user_ptr(cmd->allowed_iovas);
drivers/iommu/iommufd/selftest.c
2111
u64_to_user_ptr(cmd->check_map.uptr));
drivers/iommu/iommufd/selftest.c
2114
ucmd, u64_to_user_ptr(cmd->check_refs.uptr),
drivers/iommu/iommufd/selftest.c
2134
u64_to_user_ptr(cmd->access_pages.uptr),
drivers/iommu/iommufd/selftest.c
2140
u64_to_user_ptr(cmd->access_rw.uptr),
drivers/iommu/iommufd/selftest.c
2156
u64_to_user_ptr(cmd->dirty.uptr),
drivers/iommu/iommufd/vfio_compat.c
191
rc = iopt_map_user_pages(ictx, &ioas->iopt, &iova, u64_to_user_ptr(map.vaddr),
drivers/iommu/iommufd/viommu.c
22
.uptr = u64_to_user_ptr(cmd->data_uptr),
drivers/media/rc/bpf-lirc.c
297
__u32 __user *prog_ids = u64_to_user_ptr(attr->query.prog_ids);
drivers/media/v4l2-core/v4l2-ioctl.c
3237
*user_ptr = u64_to_user_ptr(routing->routes);
drivers/misc/fastrpc.c
1322
name = memdup_user(u64_to_user_ptr(init.name), init.namelen);
drivers/misc/nsm.c
159
if (copy_from_user(req->data, u64_to_user_ptr(raw->request.addr),
drivers/misc/nsm.c
176
if (copy_to_user(u64_to_user_ptr(raw->response.addr),
drivers/misc/nsm.c
356
void __user *argp = u64_to_user_ptr((u64)arg);
drivers/misc/ntsync.c
882
if (copy_from_user(fds, u64_to_user_ptr(args->objs), size))
drivers/nvme/host/ioctl.c
497
u64_to_user_ptr(d.addr), d.data_len,
drivers/platform/surface/surface_aggregator_cdev.c
270
plddata = u64_to_user_ptr(rqst.payload.data);
drivers/platform/surface/surface_aggregator_cdev.c
271
rspdata = u64_to_user_ptr(rqst.response.data);
drivers/s390/net/qeth_core_main.c
4806
tmp = u64_to_user_ptr(oat_data.ptr);
drivers/tee/qcomtee/call.c
515
uaddr = u64_to_user_ptr(params->u.value.a);
drivers/tee/tee_core.c
464
params[n].u.ubuf.uaddr = u64_to_user_ptr(ip.a);
drivers/tee/tee_core.c
555
uarg = u64_to_user_ptr(buf.buf_ptr);
drivers/tee/tee_core.c
632
uarg = u64_to_user_ptr(buf.buf_ptr);
drivers/tee/tee_core.c
692
uarg = u64_to_user_ptr(buf.buf_ptr);
drivers/tee/tee_core.c
837
uarg = u64_to_user_ptr(buf.buf_ptr);
drivers/tee/tee_core.c
895
p->u.ubuf.uaddr = u64_to_user_ptr(ip.a);
drivers/tee/tee_core.c
950
uarg = u64_to_user_ptr(buf.buf_ptr);
drivers/vfio/device_cdev.c
77
if (copy_from_user(&uuid, u64_to_user_ptr(bind->token_uuid_ptr),
drivers/vfio/vfio_main.c
1085
ranges = u64_to_user_ptr(control.ranges);
drivers/vfio/vfio_main.c
1194
u64_to_user_ptr(report.bitmap));
drivers/xen/privcmd.c
1271
ports = memdup_array_user(u64_to_user_ptr(ioeventfd->ports),
drivers/xen/privcmd.c
991
if (copy_from_user(dm_op, u64_to_user_ptr(irqfd->dm_op), irqfd->size)) {
fs/btrfs/ioctl.c
4705
sqe_addr = u64_to_user_ptr(READ_ONCE(cmd->sqe->addr));
fs/btrfs/ioctl.c
4846
sqe_addr = u64_to_user_ptr(READ_ONCE(cmd->sqe->addr));
fs/fuse/dev_uring.c
1016
struct iovec __user *uiov = u64_to_user_ptr(READ_ONCE(sqe->addr));
fs/fuse/ioctl.c
157
iov->iov_base = u64_to_user_ptr(enable.salt_ptr);
fs/fuse/ioctl.c
165
iov->iov_base = u64_to_user_ptr(enable.sig_ptr);
fs/nilfs2/ioctl.c
1207
kbuf = vmemdup_user(u64_to_user_ptr(argv.v_base), len);
fs/nilfs2/ioctl.c
52
void __user *base = u64_to_user_ptr(argv->v_base);
fs/nilfs2/ioctl.c
865
kbufs[4] = memdup_array_user(u64_to_user_ptr(argv[4].v_base),
fs/nilfs2/ioctl.c
890
kbufs[n] = vmemdup_user(u64_to_user_ptr(argv[n].v_base), len);
fs/proc/task_mmu.c
792
if (karg.vma_name_size && copy_to_user(u64_to_user_ptr(karg.vma_name_addr),
fs/proc/task_mmu.c
799
if (karg.build_id_size && copy_to_user(u64_to_user_ptr(karg.build_id_addr),
fs/verity/enable.c
200
copy_from_user(desc->salt, u64_to_user_ptr(arg->salt_ptr),
fs/verity/enable.c
209
copy_from_user(desc->signature, u64_to_user_ptr(arg->sig_ptr),
fs/verity/read_metadata.c
190
buf = u64_to_user_ptr(arg.buf_ptr);
fs/xattr.c
734
u64_to_user_ptr(args.value), args.size,
fs/xattr.c
876
u64_to_user_ptr(args.value), args.size);
fs/xfs/scrub/scrub.c
928
uvectors = u64_to_user_ptr(head.svh_vectors);
fs/xfs/xfs_handle.c
835
if (copy_to_user(u64_to_user_ptr(gpx->gph.gph_request.gp_buffer),
include/linux/bpfptr.h
31
return USER_BPFPTR(u64_to_user_ptr(addr));
include/rdma/uverbs_ioctl.h
813
else if (copy_from_user(to, u64_to_user_ptr(attr->ptr_attr.data),
include/rdma/uverbs_ioctl.h
835
else if (copy_from_user(to, u64_to_user_ptr(attr->ptr_attr.data),
io_uring/bpf_filter.c
374
fprog.filter = u64_to_user_ptr(reg.filter.filter_ptr);
io_uring/cmd_net.c
148
uaddr = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/cmd_net.c
149
ulen = u64_to_user_ptr(READ_ONCE(sqe->addr3));
io_uring/cmd_net.c
23
optval = u64_to_user_ptr(READ_ONCE(sqe->optval));
io_uring/cmd_net.c
47
optval = u64_to_user_ptr(READ_ONCE(sqe->optval));
io_uring/epoll.c
43
ev = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/epoll.c
75
iew->events = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/fs.c
124
fname = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/fs.c
173
fname = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/fs.c
217
oldpath = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/fs.c
218
newpath = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/fs.c
264
oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/fs.c
265
newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/fs.c
62
oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/fs.c
63
newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/futex.c
136
iof->uaddr = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/futex.c
183
iof->uaddr = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/io_uring.c
2498
ext_arg->sig = u64_to_user_ptr(READ_ONCE(w->sigmask));
io_uring/io_uring.c
2527
ext_arg->sig = u64_to_user_ptr(arg.sigmask);
io_uring/io_uring.c
2530
if (get_timespec64(&ext_arg->ts, u64_to_user_ptr(arg.ts)))
io_uring/kbuf.c
151
return u64_to_user_ptr(kbuf->addr);
io_uring/kbuf.c
216
sel.addr = u64_to_user_ptr(READ_ONCE(buf->addr));
io_uring/kbuf.c
312
iov->iov_base = u64_to_user_ptr(READ_ONCE(buf->addr));
io_uring/kbuf.c
522
if (!access_ok(u64_to_user_ptr(p->addr), size))
io_uring/mock_file.c
225
uarg = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/mock_file.c
284
uarg = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/mock_file.c
74
ubuf = u64_to_user_ptr(READ_ONCE(sqe->addr3));
io_uring/mock_file.c
75
iovec = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/net.c
1622
accept->addr = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/net.c
1623
accept->addr_len = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/net.c
1781
conn->addr = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/net.c
1853
uaddr = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/net.c
357
sr->buf = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/net.c
368
addr = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/net.c
399
sr->umsg = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/net.c
794
sr->umsg = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/openclose.c
111
how = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/openclose.c
333
p->fds = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/openclose.c
69
fname = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/query.c
126
uhdr = u64_to_user_ptr(next_hdr);
io_uring/query.c
68
udata = u64_to_user_ptr(hdr.query_data);
io_uring/register.c
685
rd_uptr = u64_to_user_ptr(reg.region_uptr);
io_uring/rsrc.c
215
u64 __user *tags = u64_to_user_ptr(up->tags);
io_uring/rsrc.c
216
__s32 __user *fds = u64_to_user_ptr(up->data);
io_uring/rsrc.c
280
u64 __user *tags = u64_to_user_ptr(up->tags);
io_uring/rsrc.c
297
uvec = u64_to_user_ptr(user_data);
io_uring/rsrc.c
400
return io_sqe_files_register(ctx, u64_to_user_ptr(rr.data),
io_uring/rsrc.c
401
rr.nr, u64_to_user_ptr(rr.tags));
io_uring/rsrc.c
405
return io_sqe_buffers_register(ctx, u64_to_user_ptr(rr.data),
io_uring/rsrc.c
406
rr.nr, u64_to_user_ptr(rr.tags));
io_uring/rsrc.c
432
__s32 __user *fds = u64_to_user_ptr(up->arg);
io_uring/rw.c
118
sel->addr = u64_to_user_ptr(rw->addr);
io_uring/rw.c
239
if (copy_from_user(&pi_attr, u64_to_user_ptr(attr_ptr),
io_uring/rw.c
250
ret = import_ubuf(ddir, u64_to_user_ptr(pi_attr.addr),
io_uring/rw.c
422
uvec = u64_to_user_ptr(rw->addr);
io_uring/rw.c
55
struct compat_iovec __user *uiov = u64_to_user_ptr(rw->addr);
io_uring/rw.c
723
addr = u64_to_user_ptr(rw->addr);
io_uring/rw.c
76
uiov = u64_to_user_ptr(rw->addr);
io_uring/statx.c
36
path = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/statx.c
37
sx->buffer = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/timeout.c
465
if (get_timespec64(&tr->ts, u64_to_user_ptr(READ_ONCE(sqe->addr2))))
io_uring/timeout.c
560
if (get_timespec64(&data->ts, u64_to_user_ptr(READ_ONCE(sqe->addr))))
io_uring/waitid.c
277
iw->infop = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/xattr.c
130
name = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/xattr.c
131
ix->ctx.cvalue = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/xattr.c
164
path = u64_to_user_ptr(READ_ONCE(sqe->addr3));
io_uring/xattr.c
51
name = u64_to_user_ptr(READ_ONCE(sqe->addr));
io_uring/xattr.c
52
ix->ctx.value = u64_to_user_ptr(READ_ONCE(sqe->addr2));
io_uring/xattr.c
92
path = u64_to_user_ptr(READ_ONCE(sqe->addr3));
io_uring/zcrx.c
783
if (copy_from_user(&rd, u64_to_user_ptr(reg.region_ptr), sizeof(rd)))
io_uring/zcrx.c
794
if (copy_from_user(&area, u64_to_user_ptr(reg.area_ptr), sizeof(area)))
io_uring/zcrx.c
862
copy_to_user(u64_to_user_ptr(reg.region_ptr), &rd, sizeof(rd)) ||
io_uring/zcrx.c
863
copy_to_user(u64_to_user_ptr(reg.area_ptr), &area, sizeof(area))) {
kernel/bpf/bpf_iter.c
456
char __user *ubuf = u64_to_user_ptr(info->iter.target_name);
kernel/bpf/btf.c
5887
char __user *log_ubuf = u64_to_user_ptr(attr->btf_log_buf);
kernel/bpf/btf.c
8201
uinfo = u64_to_user_ptr(attr->info.info);
kernel/bpf/btf.c
8210
ubtf = u64_to_user_ptr(info.btf);
kernel/bpf/btf.c
8218
uname = u64_to_user_ptr(info.name);
kernel/bpf/cgroup.c
1213
__u32 __user *prog_attach_flags = u64_to_user_ptr(attr->query.prog_attach_flags);
kernel/bpf/cgroup.c
1215
__u32 __user *prog_ids = u64_to_user_ptr(attr->query.prog_ids);
kernel/bpf/hashtab.c
1774
void __user *uvalues = u64_to_user_ptr(attr->batch.values);
kernel/bpf/hashtab.c
1775
void __user *ukeys = u64_to_user_ptr(attr->batch.keys);
kernel/bpf/hashtab.c
1776
void __user *ubatch = u64_to_user_ptr(attr->batch.in_batch);
kernel/bpf/hashtab.c
1995
ubatch = u64_to_user_ptr(attr->batch.out_batch);
kernel/bpf/mprog.c
420
uprog_id = u64_to_user_ptr(attr->query.prog_ids);
kernel/bpf/mprog.c
421
uprog_flags = u64_to_user_ptr(attr->query.prog_attach_flags);
kernel/bpf/mprog.c
422
ulink_id = u64_to_user_ptr(attr->query.link_ids);
kernel/bpf/mprog.c
423
ulink_flags = u64_to_user_ptr(attr->query.link_attach_flags);
kernel/bpf/net_namespace.c
250
__u32 __user *prog_ids = u64_to_user_ptr(attr->query.prog_ids);
kernel/bpf/offload.c
483
uinsns = u64_to_user_ptr(info->jited_prog_insns);
kernel/bpf/syscall.c
1713
void __user *ukey = u64_to_user_ptr(attr->key);
kernel/bpf/syscall.c
1714
void __user *uvalue = u64_to_user_ptr(attr->value);
kernel/bpf/syscall.c
1881
void __user *ukey = u64_to_user_ptr(attr->key);
kernel/bpf/syscall.c
1882
void __user *unext_key = u64_to_user_ptr(attr->next_key);
kernel/bpf/syscall.c
1939
void __user *keys = u64_to_user_ptr(attr->batch.keys);
kernel/bpf/syscall.c
1995
void __user *values = u64_to_user_ptr(attr->batch.values);
kernel/bpf/syscall.c
1996
void __user *keys = u64_to_user_ptr(attr->batch.keys);
kernel/bpf/syscall.c
2053
void __user *uobatch = u64_to_user_ptr(attr->batch.out_batch);
kernel/bpf/syscall.c
2054
void __user *ubatch = u64_to_user_ptr(attr->batch.in_batch);
kernel/bpf/syscall.c
2055
void __user *values = u64_to_user_ptr(attr->batch.values);
kernel/bpf/syscall.c
2056
void __user *keys = u64_to_user_ptr(attr->batch.keys);
kernel/bpf/syscall.c
2144
void __user *ukey = u64_to_user_ptr(attr->key);
kernel/bpf/syscall.c
2145
void __user *uvalue = u64_to_user_ptr(attr->value);
kernel/bpf/syscall.c
3163
u64_to_user_ptr(attr->pathname));
kernel/bpf/syscall.c
3179
return bpf_obj_get_user(path_fd, u64_to_user_ptr(attr->pathname),
kernel/bpf/syscall.c
3854
char __user *ubuf = u64_to_user_ptr(info->raw_tracepoint.tp_name);
kernel/bpf/syscall.c
3951
uname = u64_to_user_ptr(info->perf_event.kprobe.func_name);
kernel/bpf/syscall.c
4007
uname = u64_to_user_ptr(info->perf_event.uprobe.file_name);
kernel/bpf/syscall.c
4072
uname = u64_to_user_ptr(info->perf_event.tracepoint.tp_name);
kernel/bpf/syscall.c
4331
tp_name = u64_to_user_ptr(attr->raw_tracepoint.name);
kernel/bpf/syscall.c
5017
struct bpf_prog_info __user *uinfo = u64_to_user_ptr(attr->info.info);
kernel/bpf/syscall.c
5050
u32 __user *user_map_ids = u64_to_user_ptr(info.map_ids);
kernel/bpf/syscall.c
5096
uinsns = u64_to_user_ptr(info.xlated_prog_insns);
kernel/bpf/syscall.c
5131
uinsns = u64_to_user_ptr(info.jited_prog_insns);
kernel/bpf/syscall.c
5174
user_ksyms = u64_to_user_ptr(info.jited_ksyms);
kernel/bpf/syscall.c
5202
user_lens = u64_to_user_ptr(info.jited_func_lens);
kernel/bpf/syscall.c
5229
user_finfo = u64_to_user_ptr(info.func_info);
kernel/bpf/syscall.c
5241
user_linfo = u64_to_user_ptr(info.line_info);
kernel/bpf/syscall.c
5259
user_linfo = u64_to_user_ptr(info.jited_line_info);
kernel/bpf/syscall.c
5277
user_prog_tags = u64_to_user_ptr(info.prog_tags);
kernel/bpf/syscall.c
5306
struct bpf_map_info __user *uinfo = u64_to_user_ptr(attr->info.info);
kernel/bpf/syscall.c
5345
char __user *uhash = u64_to_user_ptr(info.hash);
kernel/bpf/syscall.c
5378
struct bpf_btf_info __user *uinfo = u64_to_user_ptr(attr->info.info);
kernel/bpf/syscall.c
5394
struct bpf_link_info __user *uinfo = u64_to_user_ptr(attr->info.info);
kernel/bpf/syscall.c
5432
struct bpf_token_info __user *uinfo = u64_to_user_ptr(attr->info.info);
kernel/bpf/syscall.c
5541
char __user *ubuf = u64_to_user_ptr(attr->task_fd_query.buf);
kernel/bpf/syscall.c
6149
char __user *buf = u64_to_user_ptr(attr->prog_stream_read.stream_buf);
kernel/bpf/token.c
201
struct bpf_token_info __user *uinfo = u64_to_user_ptr(attr->info.info);
kernel/fork.c
2846
.pidfd = u64_to_user_ptr(args.pidfd),
kernel/fork.c
2847
.child_tid = u64_to_user_ptr(args.child_tid),
kernel/fork.c
2848
.parent_tid = u64_to_user_ptr(args.parent_tid),
kernel/fork.c
2858
copy_from_user(kset_tid, u64_to_user_ptr(args.set_tid),
kernel/futex/syscalls.c
472
return futex_requeue(u64_to_user_ptr(futexes[0].w.uaddr), futexes[0].w.flags,
kernel/futex/syscalls.c
473
u64_to_user_ptr(futexes[1].w.uaddr), futexes[1].w.flags,
kernel/futex/waitwake.c
433
ret = get_futex_key(u64_to_user_ptr(vs[i].w.uaddr),
kernel/trace/bpf_trace.c
2405
u64 __user *ucookies = u64_to_user_ptr(info->kprobe_multi.cookies);
kernel/trace/bpf_trace.c
2406
u64 __user *uaddrs = u64_to_user_ptr(info->kprobe_multi.addrs);
kernel/trace/bpf_trace.c
2767
uaddrs = u64_to_user_ptr(attr->link_create.kprobe_multi.addrs);
kernel/trace/bpf_trace.c
2768
usyms = u64_to_user_ptr(attr->link_create.kprobe_multi.syms);
kernel/trace/bpf_trace.c
2783
ucookies = u64_to_user_ptr(attr->link_create.kprobe_multi.cookies);
kernel/trace/bpf_trace.c
2964
u64 __user *uref_ctr_offsets = u64_to_user_ptr(info->uprobe_multi.ref_ctr_offsets);
kernel/trace/bpf_trace.c
2965
u64 __user *ucookies = u64_to_user_ptr(info->uprobe_multi.cookies);
kernel/trace/bpf_trace.c
2966
u64 __user *uoffsets = u64_to_user_ptr(info->uprobe_multi.offsets);
kernel/trace/bpf_trace.c
2967
u64 __user *upath = u64_to_user_ptr(info->uprobe_multi.path);
kernel/trace/bpf_trace.c
3206
upath = u64_to_user_ptr(attr->link_create.uprobe_multi.path);
kernel/trace/bpf_trace.c
3207
uoffsets = u64_to_user_ptr(attr->link_create.uprobe_multi.offsets);
kernel/trace/bpf_trace.c
3216
uref_ctr_offsets = u64_to_user_ptr(attr->link_create.uprobe_multi.ref_ctr_offsets);
kernel/trace/bpf_trace.c
3217
ucookies = u64_to_user_ptr(attr->link_create.uprobe_multi.cookies);
kernel/trace/trace_event_perf.c
254
func = strndup_user(u64_to_user_ptr(p_event->attr.kprobe_func),
kernel/trace/trace_event_perf.c
308
path = strndup_user(u64_to_user_ptr(p_event->attr.uprobe_path),
lib/test_hmm.c
1182
if (copy_to_user(u64_to_user_ptr(cmd->ptr), bounce.ptr,
lib/test_hmm.c
1354
uptr = u64_to_user_ptr(cmd->ptr);
lib/test_hmm.c
420
if (copy_to_user(u64_to_user_ptr(cmd->ptr), bounce.ptr,
lib/test_hmm.c
470
if (copy_from_user(bounce.ptr, u64_to_user_ptr(cmd->ptr),
lib/test_hmm.c
932
if (copy_to_user(u64_to_user_ptr(cmd->ptr), bounce.ptr,
net/bpf/bpf_dummy_struct_ops.c
43
ctx_in = u64_to_user_ptr(kattr->test.ctx_in);
net/bpf/bpf_dummy_struct_ops.c
48
u_state = u64_to_user_ptr(args->args[0]);
net/bpf/bpf_dummy_struct_ops.c
63
u_state = u64_to_user_ptr(args->args[0]);
net/bpf/test_run.c
1076
void __user *data_in = u64_to_user_ptr(kattr->test.data_in);
net/bpf/test_run.c
1358
void __user *data_in = u64_to_user_ptr(kattr->test.data_in);
net/bpf/test_run.c
1616
void __user *ctx_in = u64_to_user_ptr(kattr->test.ctx_in);
net/bpf/test_run.c
442
void __user *data_out = u64_to_user_ptr(kattr->test.data_out);
net/bpf/test_run.c
654
void __user *data_in = u64_to_user_ptr(kattr->test.data_in);
net/bpf/test_run.c
750
void __user *ctx_in = u64_to_user_ptr(kattr->test.ctx_in);
net/bpf/test_run.c
806
void __user *data_in = u64_to_user_ptr(kattr->test.ctx_in);
net/bpf/test_run.c
807
void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out);
net/bpf/test_run.c
839
void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out);
net/core/sock_map.c
1558
__u32 __user *prog_ids = u64_to_user_ptr(attr->query.prog_ids);
net/mptcp/sockopt.c
1318
sfinfoptr = u64_to_user_ptr(mfi.subflow_info);
net/mptcp/sockopt.c
1322
tcpinfoptr = u64_to_user_ptr(mfi.tcp_info);
virt/kvm/vfio.c
298
u64_to_user_ptr(attr->addr));