arch/arm/mm/cache-feroceon-l2.c:141: unsigned long range_end;
arch/arm/mm/cache-feroceon-l2.c:149: range_end = end;
arch/arm/mm/cache-feroceon-l2.c:156: if (range_end > start + MAX_RANGE_SIZE)
arch/arm/mm/cache-feroceon-l2.c:157: range_end = start + MAX_RANGE_SIZE;
arch/arm/mm/cache-feroceon-l2.c:162: if (range_end > (start | (PAGE_SIZE - 1)) + 1)
arch/arm/mm/cache-feroceon-l2.c:163: range_end = (start | (PAGE_SIZE - 1)) + 1;
arch/arm/mm/cache-feroceon-l2.c:165: return range_end;
arch/arm/mm/cache-feroceon-l2.c:190: unsigned long range_end = calc_range_end(start, end);
arch/arm/mm/cache-feroceon-l2.c:191: l2_inv_pa_range(start, range_end - CACHE_LINE_SIZE);
arch/arm/mm/cache-feroceon-l2.c:192: start = range_end;
arch/arm/mm/cache-feroceon-l2.c:208: unsigned long range_end = calc_range_end(start, end);
arch/arm/mm/cache-feroceon-l2.c:209: l2_clean_pa_range(start, range_end - CACHE_LINE_SIZE);
arch/arm/mm/cache-feroceon-l2.c:210: start = range_end;
arch/arm/mm/cache-feroceon-l2.c:222: unsigned long range_end = calc_range_end(start, end);
arch/arm/mm/cache-feroceon-l2.c:224: l2_clean_pa_range(start, range_end - CACHE_LINE_SIZE);
arch/arm/mm/cache-feroceon-l2.c:225: l2_inv_pa_range(start, range_end - CACHE_LINE_SIZE);
arch/arm/mm/cache-feroceon-l2.c:226: start = range_end;
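
All of the feroceon hits above belong to one chunked-walk idiom: calc_range_end() clamps each chunk so it neither exceeds a maximum burst size nor crosses a page boundary, the cache operation covers the inclusive span [start, range_end - CACHE_LINE_SIZE], and start then advances to range_end. A minimal sketch assembled from the lines above; the constant values and the op() callback are illustrative stand-ins, not the driver's:

    #define PAGE_SIZE       4096UL
    #define CACHE_LINE_SIZE 32UL
    #define MAX_RANGE_SIZE  (32UL * 1024)   /* illustrative value */

    /* Clamp the current chunk: never more than MAX_RANGE_SIZE at once,
     * never across a page boundary (reconstructed from the
     * cache-feroceon-l2.c hits at lines 141-165 above).
     */
    static unsigned long calc_range_end(unsigned long start, unsigned long end)
    {
        unsigned long range_end = end;

        if (range_end > start + MAX_RANGE_SIZE)
            range_end = start + MAX_RANGE_SIZE;
        if (range_end > (start | (PAGE_SIZE - 1)) + 1)
            range_end = (start | (PAGE_SIZE - 1)) + 1;
        return range_end;
    }

    /* op() takes an inclusive last address, hence the trailing
     * "- CACHE_LINE_SIZE" on every chunk.
     */
    static void walk_range(unsigned long start, unsigned long end,
                           void (*op)(unsigned long, unsigned long))
    {
        while (start < end) {
            unsigned long range_end = calc_range_end(start, end);

            op(start, range_end - CACHE_LINE_SIZE);
            start = range_end;
        }
    }
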
arch/arm/mm/cache-l2x0.c:1380: unsigned long range_end;
arch/arm/mm/cache-l2x0.c:1393: range_end = aurora_range_end(start, end);
arch/arm/mm/cache-l2x0.c:1397: writel_relaxed(range_end - CACHE_LINE_SIZE, base + offset);
arch/arm/mm/cache-l2x0.c:1401: start = range_end;
arch/arm/mm/pageattr.c:29: unsigned long range_start, unsigned long range_end)
arch/arm/mm/pageattr.c:31: return start >= range_start && start < range_end &&
arch/arm/mm/pageattr.c:32: size <= range_end - start;
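
The pageattr.c test is the overflow-safe form of "is [start, start + size) inside [range_start, range_end)": comparing size against range_end - start avoids computing start + size, which could wrap near the top of the address space. As a standalone sketch (treat the function name as illustrative):

    #include <stdbool.h>

    static bool in_range(unsigned long start, unsigned long size,
                         unsigned long range_start, unsigned long range_end)
    {
        /* "size <= range_end - start" cannot overflow once we know
         * start < range_end; "start + size <= range_end" could.
         */
        return start >= range_start && start < range_end &&
               size <= range_end - start;
    }
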
arch/arm64/kvm/hyp/include/nvhe/gfp.h:21: phys_addr_t range_end;
arch/arm64/kvm/hyp/nvhe/page_alloc.c:45: if (addr < pool->range_start || addr >= pool->range_end)
arch/arm64/kvm/hyp/nvhe/page_alloc.c:103: if (phys < pool->range_start || phys >= pool->range_end)
arch/arm64/kvm/hyp/nvhe/page_alloc.c:236: pool->range_end = phys + (nr_pages << PAGE_SHIFT);
arch/powerpc/kernel/fadump.c:1748: phys_addr_t range_start, range_end;
arch/powerpc/kernel/fadump.c:1763: range_end = memblock_end_of_DRAM();
arch/powerpc/kernel/fadump.c:1778: range_end = min(ppc64_rma_size, fw_dump.boot_mem_top);
arch/powerpc/kernel/fadump.c:1784: range_end);
arch/powerpc/platforms/powernv/opal-prd.c:64: uint64_t range_addr, range_size, range_end;
arch/powerpc/platforms/powernv/opal-prd.c:73: range_end = range_addr + range_size;
arch/powerpc/platforms/powernv/opal-prd.c:81: if (range_end <= range_addr)
arch/powerpc/platforms/powernv/opal-prd.c:84: if (addr >= range_addr && addr + size <= range_end) {
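
opal-prd takes the complementary precaution: it computes range_addr + range_size up front, then rejects the descriptor when the sum wrapped, since with unsigned arithmetic range_end <= range_addr holds exactly when the addition overflowed (or the size was zero). A hedged sketch of that guard:

    #include <stdbool.h>
    #include <stdint.h>

    static bool range_valid(uint64_t range_addr, uint64_t range_size)
    {
        uint64_t range_end = range_addr + range_size;

        /* Wrapped and empty ranges both fail this single test. */
        return range_end > range_addr;
    }
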
arch/s390/boot/physmem_info.c:278: unsigned long range_start, range_end;
arch/s390/boot/physmem_info.c:284: __get_physmem_range(nranges - 1, &range_start, &range_end, false);
arch/s390/boot/physmem_info.c:285: pos = min(range_end, pos);
arch/x86/kernel/e820.c:212: u64 range_start, range_end;
arch/x86/kernel/e820.c:215: range_end = entry->addr + entry->size;
arch/x86/kernel/e820.c:228: pr_info("%s: [mem %#018Lx-%#018Lx] ", who, range_start, range_end-1);
arch/x86/kernel/e820.c:232: range_end_prev = range_end;
arch/x86/kernel/e820.c:633: u64 range_start, range_end;
arch/x86/kernel/e820.c:637: range_end = entry->addr + entry->size;
arch/x86/kernel/e820.c:658: range_end_prev = range_end;
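
Both e820 sites keep ranges half-open internally (range_end = addr + size) and switch to the inclusive [start, end - 1] form only when printing; range_end_prev carries the previous end across iterations so gaps and overlaps between adjacent entries can be spotted. A small standalone illustration of the printing convention:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint64_t addr = 0x100000, size = 0x200000;
        uint64_t range_end = addr + size;    /* half-open end */

        /* Printed inclusively, as e820 does via "range_end - 1". */
        printf("[mem %#018llx-%#018llx]\n",
               (unsigned long long)addr,
               (unsigned long long)(range_end - 1));
        return 0;
    }
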
drivers/accel/habanalabs/goya/goya_coresight.c:371: u64 range_start, range_end;
drivers/accel/habanalabs/goya/goya_coresight.c:380: range_end = prop->dmmu.end_addr;
drivers/accel/habanalabs/goya/goya_coresight.c:382: return hl_mem_area_inside_range(addr, size, range_start, range_end);
drivers/firmware/efi/libstub/unaccepted_memory.c:182: unsigned long range_start, range_end;
drivers/firmware/efi/libstub/unaccepted_memory.c:212: for_each_set_bitrange_from(range_start, range_end,
drivers/firmware/efi/libstub/unaccepted_memory.c:217: phys_end = range_end * unit_size + unaccepted_table->phys_base;
drivers/firmware/efi/libstub/unaccepted_memory.c:221: range_start, range_end - range_start);
drivers/firmware/efi/unaccepted_memory.c:36: unsigned long range_start, range_end;
drivers/firmware/efi/unaccepted_memory.c:130: for_each_set_bitrange_from(range_start, range_end, unaccepted->bitmap,
drivers/firmware/efi/unaccepted_memory.c:133: unsigned long len = range_end - range_start;
drivers/firmware/efi/unaccepted_memory.c:136: phys_end = range_end * unit_size + unaccepted->phys_base;
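
In both unaccepted-memory paths the bitmap walk and the address math pair up the same way: for_each_set_bitrange_from() yields half-open bit ranges [range_start, range_end), and each bit index scales by unit_size and offsets by phys_base to recover a physical span. A kernel-context sketch under those assumptions, with accept() as a stand-in callback:

    #include <linux/bitmap.h>
    #include <linux/types.h>

    static void accept_bitmap_ranges(unsigned long *bitmap, unsigned long bits,
                                     phys_addr_t phys_base,
                                     unsigned long unit_size,
                                     void (*accept)(phys_addr_t, phys_addr_t))
    {
        unsigned long range_start = 0, range_end;

        for_each_set_bitrange_from(range_start, range_end, bitmap, bits) {
            /* Each set bit stands for unit_size bytes. */
            phys_addr_t phys_start = range_start * unit_size + phys_base;
            phys_addr_t phys_end = range_end * unit_size + phys_base;

            accept(phys_start, phys_end);
        }
    }
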
drivers/gpu/drm/amd/amdkfd/kfd_device.c:1355: (*mem_obj)->range_end = found;
drivers/gpu/drm/amd/amdkfd/kfd_device.c:1378: (*mem_obj)->range_end =
drivers/gpu/drm/amd/amdkfd/kfd_device.c:1386: if ((*mem_obj)->range_end != found) {
drivers/gpu/drm/amd/amdkfd/kfd_device.c:1406: (*mem_obj)->range_start, (*mem_obj)->range_end);
drivers/gpu/drm/amd/amdkfd/kfd_device.c:1410: (*mem_obj)->range_end - (*mem_obj)->range_start + 1);
drivers/gpu/drm/amd/amdkfd/kfd_device.c:1432: mem_obj, mem_obj->range_start, mem_obj->range_end);
drivers/gpu/drm/amd/amdkfd/kfd_device.c:1438: mem_obj->range_end - mem_obj->range_start + 1);
drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c:213: int range_end = dev->shared_resources.non_cp_doorbells_end;
drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c:219: pr_debug("reserved doorbell 0x%03x - 0x%03x\n", range_start, range_end);
drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c:222: range_end + KFD_QUEUE_DOORBELL_MIRROR_OFFSET);
drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c:225: if (i >= range_start && i <= range_end) {
drivers/gpu/drm/amd/amdkfd/kfd_priv.h:252: uint32_t range_end;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_top/dml2_top_soc15.c:338: int range_end;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_top/dml2_top_soc15.c:351: range_end = mcache_boundaries[left_cache_id] - shift - 1;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_top/dml2_top_soc15.c:353: if (range_start <= pipe_h_vp_start && pipe_h_vp_start <= range_end)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_top/dml2_top_soc15.c:356: range_start = range_end + 1;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_top/dml2_top_soc15.c:359: range_end = MAX_VP;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_top/dml2_top_soc15.c:366: if (range_start <= pipe_h_vp_end && pipe_h_vp_end <= range_end) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_top/dml2_top_soc15.c:369: range_end = range_start - 1;
drivers/gpu/drm/drm_mm.c:519: u64 range_start, u64 range_end,
drivers/gpu/drm/drm_mm.c:526: DRM_MM_BUG_ON(range_start > range_end);
drivers/gpu/drm/drm_mm.c:528: if (unlikely(size == 0 || range_end - range_start < size))
drivers/gpu/drm/drm_mm.c:541: for (hole = first_hole(mm, range_start, range_end, size, mode);
drivers/gpu/drm/drm_mm.c:549: if (mode == DRM_MM_INSERT_LOW && hole_start >= range_end)
drivers/gpu/drm/drm_mm.c:561: adj_end = min(col_end, range_end);
drivers/gpu/drm/drm_mm.c:582: min(col_end, range_end) - adj_start < size)
drivers/gpu/drm/drm_mm.c:728: scan->range_end = end;
drivers/gpu/drm/drm_mm.c:779: adj_end = min(col_end, scan->range_end);
drivers/gpu/drm/drm_mm.c:798: min(col_end, scan->range_end) - adj_start < scan->size)
drivers/gpu/drm/i915/gem/i915_gem_shmem.c:313: .range_end = LLONG_MAX,
drivers/gpu/drm/msm/adreno/a6xx_gmu.c:1397: u64 range_start, range_end;
drivers/gpu/drm/msm/adreno/a6xx_gmu.c:1404: range_end = 0x80000000;
drivers/gpu/drm/msm/adreno/a6xx_gmu.c:1408: range_end = iova + size;
drivers/gpu/drm/msm/adreno/a6xx_gmu.c:1418: range_start, range_end);
drivers/gpu/drm/msm/msm_gem.c:438: u64 range_end)
drivers/gpu/drm/msm/msm_gem.c:447: vma = msm_gem_vma_new(vm, obj, 0, range_start, range_end);
drivers/gpu/drm/msm/msm_gem.c:450: GEM_WARN_ON((vma->va.addr + obj->size) > range_end);
drivers/gpu/drm/msm/msm_gem.c:525: u64 range_start, u64 range_end)
drivers/gpu/drm/msm/msm_gem.c:535: vma = get_vma_locked(obj, vm, range_start, range_end);
drivers/gpu/drm/msm/msm_gem.c:554: u64 range_start, u64 range_end)
drivers/gpu/drm/msm/msm_gem.c:560: ret = get_and_pin_iova_range_locked(obj, vm, iova, range_start, range_end);
drivers/gpu/drm/msm/msm_gem.h:190: u64 offset, u64 range_start, u64 range_end);
drivers/gpu/drm/msm/msm_gem.h:277: u64 range_start, u64 range_end);
drivers/gpu/drm/msm/msm_gem_vma.c:369: u64 offset, u64 range_start, u64 range_end)
drivers/gpu/drm/msm/msm_gem_vma.c:387: range_start, range_end, 0);
drivers/gpu/drm/msm/msm_gem_vma.c:393: range_end = range_start + obj->size;
drivers/gpu/drm/msm/msm_gem_vma.c:397: GEM_WARN_ON((range_end - range_start) > obj->size);
drivers/gpu/drm/msm/msm_gem_vma.c:401: .va.range = range_end - range_start,
drivers/gpu/drm/xe/xe_reg_whitelist.c:203: u32 range_start, range_end;
drivers/gpu/drm/xe/xe_reg_whitelist.c:221: range_end = range_start | REG_GENMASK(range_bit, 0);
drivers/gpu/drm/xe/xe_reg_whitelist.c:236: range_start, range_end,
drivers/gpu/drm/xe/xe_svm.c:378: u64 range_end;
drivers/gpu/drm/xe/xe_svm.c:395: range_end = xe_svm_range_end(range);
drivers/gpu/drm/xe/xe_svm.c:409: err = xe_svm_range_set_default_attr(vm, range_start, range_end);
drivers/gpu/drm/xe/xe_vm.c:2261: u64 range_end = addr + range;
drivers/gpu/drm/xe/xe_vm.c:2274: xe_vm_find_cpu_addr_mirror_vma_range(vm, &range_start, &range_end);
drivers/gpu/drm/xe/xe_vm.c:2282: .map.va.range = range_end - range_start,
drivers/gpu/drm/xe/xe_vm.c:2377: u64 ret = xe_svm_find_vma_start(vm, addr, range_end, vma);
drivers/gpu/drm/xe/xe_vm.c:2407: if (range_end > xe_svm_range_end(svm_range) &&
drivers/infiniband/hw/hfi1/fault.c:111: unsigned long range_start, range_end, i;
drivers/infiniband/hw/hfi1/fault.c:129: if (kstrtoul(token, 0, &range_end))
drivers/infiniband/hw/hfi1/fault.c:132: range_end = range_start;
drivers/infiniband/hw/hfi1/fault.c:134: if (range_start == range_end && range_start == -1UL) {
drivers/infiniband/hw/hfi1/fault.c:140: if (range_start >= bound || range_end >= bound)
drivers/infiniband/hw/hfi1/fault.c:143: for (i = range_start; i <= range_end; i++) {
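
The fault-injection hits show the usual debugfs idiom for accepting either a single opcode or a start-end pair: a missing second number collapses the range (range_end = range_start), both ends are bounds-checked, and the apply loop is inclusive. A hedged userspace rendering of the parse (the hfi1 code also special-cases the pair (-1UL, -1UL), which this sketch omits):

    #include <stdlib.h>

    /* Parse "N" or "N-M" into an inclusive [*range_start, *range_end].
     * Returns 0 on success, -1 when either end is out of bounds.
     */
    static int parse_range(const char *token, unsigned long bound,
                           unsigned long *range_start, unsigned long *range_end)
    {
        char *dash;

        *range_start = strtoul(token, &dash, 0);
        if (*dash == '-')
            *range_end = strtoul(dash + 1, NULL, 0);
        else
            *range_end = *range_start;    /* single value */

        if (*range_start >= bound || *range_end >= bound)
            return -1;
        return 0;
    }
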
drivers/mmc/host/sdhci-sprd.c:594: int range_end = SDHCI_SPRD_MAX_RANGE;
drivers/mmc/host/sdhci-sprd.c:608: range_end = i - 1;
drivers/mmc/host/sdhci-sprd.c:619: range_end = i - 1;
drivers/mmc/host/sdhci-sprd.c:622: middle_range = range_end - (range_length - 1) / 2;
drivers/net/ethernet/cisco/enic/enic.h:73: u32 range_end;
drivers/net/ethernet/cisco/enic/enic_ethtool.c:385: ecmd->rx_coalesce_usecs_high = rxcoal->range_end;
drivers/net/ethernet/cisco/enic/enic_ethtool.c:459: rxcoal->range_end = rx_coalesce_usecs_high;
drivers/net/ethernet/cisco/enic/enic_main.c:400: rx_coal->range_end = ENIC_RX_COALESCE_RANGE_END;
drivers/net/ethernet/cisco/enic/enic_main.c:1314: timer = range_start + ((rx_coal->range_end - range_start) *
drivers/net/ethernet/netronome/nfp/bpf/jit.c:2734: s16 range_end = meta->pkt_cache.range_end;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:2741: len = range_end - range_start;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:2866: if (meta->pkt_cache.range_end) {
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4279: s16 range_start = 0, range_end = 0;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4332: s16 new_end = range_end;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4341: if (end > range_end) {
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4352: range_end = new_end;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4359: range_node->pkt_cache.range_end = range_end;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4366: range_end = insn->off + BPF_LDST_BYTES(insn);
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4371: range_node->pkt_cache.range_end = range_end;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4381: range_end = meta->pkt_cache.range_end;
drivers/net/ethernet/netronome/nfp/bpf/jit.c:4384: meta->pkt_cache.range_end = range_end;
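
The nfp JIT hits trace an interval-growing pass: each packet access proposes [insn->off, insn->off + BPF_LDST_BYTES(insn)), the current window's range_end is stretched to cover it when that still fits, and the final window is written back into every member instruction's pkt_cache.range_end. A simplified sketch of the extension step; the max_len cap here is an assumption standing in for the driver's real limit:

    #include <stdbool.h>
    #include <stdint.h>

    /* Grow [*range_start, *range_end) to cover [off, off + bytes),
     * refusing growth past max_len. Returns true when the access
     * fits in the (possibly extended) window.
     */
    static bool pkt_cache_extend(int16_t *range_start, int16_t *range_end,
                                 int16_t off, int16_t bytes, int16_t max_len)
    {
        int16_t start = off < *range_start ? off : *range_start;
        int16_t end = (off + bytes) > *range_end ? off + bytes : *range_end;

        if (end - start > max_len)
            return false;

        *range_start = start;
        *range_end = end;
        return true;
    }
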
drivers/net/ethernet/netronome/nfp/bpf/main.h:311: s16 range_end;
fs/afs/inode.c:756: .range_end = LLONG_MAX,
fs/btrfs/compression.h:73: static inline u32 btrfs_calc_input_length(struct folio *folio, u64 range_end, u64 cur)
fs/btrfs/compression.h:78: return umin(range_end, folio_next_pos(folio)) - cur;
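
btrfs_calc_input_length() is the two-line version of a clamp that recurs throughout this listing: the usable length at position cur is bounded by whichever ends first, the requested range or the current folio. A generic restatement:

    #include <stdint.h>

    /* Mirrors umin(range_end, folio_next_pos(folio)) - cur, with the
     * folio boundary generalized to any container end. Assumes
     * cur < range_end and cur < container_end.
     */
    static uint32_t calc_input_length(uint64_t range_end,
                                      uint64_t container_end, uint64_t cur)
    {
        uint64_t end = range_end < container_end ? range_end : container_end;

        return (uint32_t)(end - cur);
    }
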
fs/btrfs/extent-tree.c:6842: u64 range_end = U64_MAX;
fs/btrfs/extent-tree.c:6860: check_add_overflow(range->start, range->len, &range_end))
fs/btrfs/extent-tree.c:6865: if (cache->start >= range_end) {
fs/btrfs/extent-tree.c:6871: end = min(range_end, btrfs_block_group_end(cache));
fs/btrfs/extent_io.c:1272: const u64 range_end = min(end, ordered->file_offset + ordered->num_bytes - 1);
fs/btrfs/extent_io.c:1275: while (cur < range_end) {
fs/btrfs/extent_io.c:2344: end = (wbc->range_end >> fs_info->nodesize_bits);
fs/btrfs/extent_io.c:2495: end = wbc->range_end >> PAGE_SHIFT;
fs/btrfs/extent_io.c:2496: if (wbc->range_start == 0 && wbc->range_end == LLONG_MAX)
fs/btrfs/extent_map.c:97: u64 end = range_end(em->start, em->len);
fs/btrfs/extent_map.c:529: u64 end = range_end(start, len);
fs/btrfs/fiemap.c:179: const u64 range_end = offset + len;
fs/btrfs/fiemap.c:209: if (range_end <= cache_end)
fs/btrfs/fiemap.c:216: len = range_end - cache_end;
fs/btrfs/fiemap.c:643: u64 range_end;
fs/btrfs/fiemap.c:660: range_end = round_up(start + len, sectorsize);
fs/btrfs/fiemap.c:663: btrfs_lock_extent(&inode->io_tree, range_start, range_end, &cached_state);
fs/btrfs/fiemap.c:683: while (prev_extent_end < range_end) {
fs/btrfs/fiemap.c:713: const u64 hole_end = min(key.offset, range_end) - 1;
fs/btrfs/fiemap.c:728: if (key.offset >= range_end) {
fs/btrfs/fiemap.c:812: if (!stopped && prev_extent_end < range_end) {
fs/btrfs/fiemap.c:815: 0, 0, 0, prev_extent_end, range_end - 1);
fs/btrfs/fiemap.c:818: prev_extent_end = range_end;
fs/btrfs/fiemap.c:843: btrfs_unlock_extent(&inode->io_tree, range_start, range_end, &cached_state);
fs/btrfs/file.c:2861: u64 range_end;
fs/btrfs/file.c:2869: range_end = round_up(end, root->fs_info->sectorsize);
fs/btrfs/file.c:2872: range_end - range_start);
fs/btrfs/inode.c:1114: .range_end = end,
fs/btrfs/inode.c:7507: u64 range_end;
fs/btrfs/inode.c:7509: range_end = round_up(offset + nocow_args.file_extent.num_bytes,
fs/btrfs/inode.c:7511: ret = btrfs_test_range_bit_exists(io_tree, offset, range_end,
fs/btrfs/inode.c:7723: u64 range_end;
fs/btrfs/inode.c:7730: range_end = page_end;
fs/btrfs/inode.c:7745: range_end = ordered->file_offset - 1;
fs/btrfs/inode.c:7750: range_end = min(ordered->file_offset + ordered->num_bytes - 1,
fs/btrfs/inode.c:7752: ASSERT(range_end + 1 - cur < U32_MAX);
fs/btrfs/inode.c:7753: range_len = range_end + 1 - cur;
fs/btrfs/inode.c:7774: btrfs_clear_extent_bit(tree, cur, range_end,
fs/btrfs/inode.c:7792: cur, range_end + 1 - cur)) {
fs/btrfs/inode.c:7818: btrfs_qgroup_free_data(inode, NULL, cur, range_end + 1 - cur, NULL);
fs/btrfs/inode.c:7820: btrfs_clear_extent_bit(tree, cur, range_end, EXTENT_LOCKED |
fs/btrfs/inode.c:7824: cur = range_end + 1;
fs/btrfs/ordered-data.c:756: u64 range_end;
fs/btrfs/ordered-data.c:765: range_end = range_start + range_len;
fs/btrfs/ordered-data.c:774: if (range_end <= ordered->disk_bytenr ||
fs/btrfs/reflink.c:64: const u64 range_end = file_offset + block_size - 1;
fs/btrfs/reflink.c:97: btrfs_clear_extent_bit(&inode->io_tree, file_offset, range_end,
fs/btrfs/reflink.c:99: ret = btrfs_set_extent_delalloc(inode, file_offset, range_end, 0, NULL);
fs/btrfs/tree-log.c:2654: u64 range_end;
fs/btrfs/tree-log.c:2686: range_end = 0;
fs/btrfs/tree-log.c:2689: range_end = (u64)-1;
fs/btrfs/tree-log.c:2692: &range_start, &range_end);
fs/btrfs/tree-log.c:2736: if (found_key.offset > range_end)
fs/btrfs/tree-log.c:2747: if (range_end == (u64)-1)
fs/btrfs/tree-log.c:2749: range_start = range_end + 1;
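
The tree-log loop walks a sequence of inclusive ranges: a lookup helper fills in the next [range_start, range_end], an end of (u64)-1 marks the final unbounded range, and each new search resumes one past the previous end. A skeleton under those assumptions, with next_range() and process() as stand-ins for the real helpers:

    #include <stdbool.h>
    #include <stdint.h>

    static void walk_ranges(bool (*next_range)(uint64_t *start, uint64_t *end),
                            void (*process)(uint64_t start, uint64_t end))
    {
        uint64_t range_start = 0, range_end;

        for (;;) {
            if (!next_range(&range_start, &range_end))
                break;
            process(range_start, range_end);
            /* (u64)-1 means "no upper bound": must be the last range. */
            if (range_end == UINT64_MAX)
                break;
            range_start = range_end + 1;
        }
    }
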
fs/ceph/addr.c:1097: ceph_wbc->end = wbc->range_end >> PAGE_SHIFT;
fs/ceph/addr.c:1098: if (wbc->range_start == 0 && wbc->range_end == LLONG_MAX)
fs/dax.c:1204: pgoff_t end_index = wbc->range_end >> PAGE_SHIFT;
fs/ext4/inode.c:2847: if (wbc->range_start == 0 && wbc->range_end == LLONG_MAX)
fs/ext4/inode.c:2858: mpd->end_pos = wbc->range_end;
fs/ext4/inode.c:3048: .range_end = jinode->i_dirty_end,
fs/ext4/super.c:557: .range_end = jinode->i_dirty_end,
fs/f2fs/data.c:3283: end = wbc->range_end >> PAGE_SHIFT;
fs/f2fs/data.c:3284: if (wbc->range_start == 0 && wbc->range_end == LLONG_MAX)
fs/fs-writeback.c:1967: .range_end = LLONG_MAX,
fs/fs-writeback.c:2965: .range_end = LLONG_MAX,
fs/gfs2/aops.c:308: end = wbc->range_end >> PAGE_SHIFT;
fs/gfs2/aops.c:309: if (wbc->range_start == 0 && wbc->range_end == LLONG_MAX)
fs/gfs2/log.c:246: .range_end = LLONG_MAX,
fs/iomap/buffered-io.c:138: struct iomap_folio_state *ifs, u64 *range_start, u64 range_end)
fs/iomap/buffered-io.c:144: offset_in_folio(folio, range_end) >> inode->i_blkbits,
fs/iomap/buffered-io.c:162: u64 range_end)
fs/iomap/buffered-io.c:166: if (*range_start >= range_end)
fs/iomap/buffered-io.c:170: return ifs_find_dirty_range(folio, ifs, range_start, range_end);
fs/iomap/buffered-io.c:171: return range_end - *range_start;
fs/netfs/buffered_write.c:118: .range_end = iocb->ki_pos + iter->count,
fs/nfs/nfstrace.h:301: loff_t range_end
fs/nfs/nfstrace.h:304: TP_ARGS(inode, range_start, range_end),
fs/nfs/nfstrace.h:312: __field(loff_t, range_end)
fs/nfs/nfstrace.h:323: __entry->range_end = range_end;
fs/nfs/nfstrace.h:332: __entry->range_start, __entry->range_end
fs/nfs/nfstrace.h:341: loff_t range_end \
fs/nfs/nfstrace.h:343: TP_ARGS(inode, range_start, range_end))
fs/nfs/write.c:660: trace_nfs_writepages(inode, wbc->range_start, wbc->range_end - wbc->range_start);
fs/nfs/write.c:701: trace_nfs_writepages_done(inode, wbc->range_start, wbc->range_end - wbc->range_start, err);
fs/nfs/write.c:2042: .range_end = range_start + len - 1,
fs/nfs/write.c:2076: .range_end = range_start + len - 1,
fs/nilfs2/inode.c:173: wbc->range_end);
fs/ocfs2/alloc.c:6962: u64 range_start, u64 range_end)
fs/ocfs2/alloc.c:6982: range_end = min_t(u64, range_end, i_size_read(inode));
fs/ocfs2/alloc.c:6983: if (range_start >= range_end)
fs/ocfs2/alloc.c:7009: ret = ocfs2_grab_eof_folios(inode, range_start, range_end, folios,
fs/ocfs2/alloc.c:7016: ocfs2_zero_cluster_folios(inode, range_start, range_end, folios,
fs/ocfs2/alloc.c:7025: range_end - 1);
fs/ocfs2/alloc.h:216: u64 range_start, u64 range_end);
fs/ocfs2/file.c:862: u64 *range_start, u64 *range_end)
fs/ocfs2/file.c:890: *range_end = 0;
fs/ocfs2/file.c:922: *range_end = ocfs2_clusters_to_bytes(inode->i_sb,
fs/ocfs2/file.c:934: u64 range_end, struct buffer_head *di_bh)
fs/ocfs2/file.c:943: (unsigned long long)range_end);
fs/ocfs2/file.c:944: BUG_ON(range_start >= range_end);
fs/ocfs2/file.c:946: while (zero_pos < range_end) {
fs/ocfs2/file.c:948: if (next_pos > range_end)
fs/ocfs2/file.c:949: next_pos = range_end;
fs/ocfs2/file.c:971: u64 zero_start, range_start = 0, range_end = 0;
fs/ocfs2/file.c:982: &range_end);
fs/ocfs2/file.c:987: if (!range_end)
fs/ocfs2/file.c:992: if (range_end > zero_to_size)
fs/ocfs2/file.c:993: range_end = zero_to_size;
fs/ocfs2/file.c:996: range_end, di_bh);
fs/ocfs2/file.c:1001: zero_start = range_end;
include/acpi/actbl2.h:2102: u32 range_end; /* End of domain range */
include/drm/drm_mm.h:236: u64 range_end;
include/linux/writeback.h:55: loff_t range_end;
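
Most of the filesystem hits in this listing orbit this one struct field: range_start and range_end bound a writeback pass in bytes, range_end is inclusive, a zero start paired with LLONG_MAX means "the whole file" (ext4, ceph, f2fs, and gfs2 all test for exactly that), and the byte bound becomes a page index via >> PAGE_SHIFT. A minimal whole-file setup matching the fs-writeback and afs call sites above:

    struct writeback_control wbc = {
        .sync_mode   = WB_SYNC_ALL,
        .nr_to_write = LONG_MAX,
        .range_start = 0,
        .range_end   = LLONG_MAX,    /* inclusive: no upper bound */
    };
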
include/trace/events/btrfs.h:689: __field( loff_t, range_end )
include/trace/events/btrfs.h:702: __entry->range_end = wbc->range_end;
include/trace/events/btrfs.h:716: __entry->range_start, __entry->range_end,
include/trace/events/ext4.h:458: __field( loff_t, range_end )
include/trace/events/ext4.h:471: __entry->range_end = wbc->range_end;
include/trace/events/ext4.h:484: __entry->range_end, __entry->sync_mode,
include/trace/events/f2fs.h:1529: __field(loff_t, range_end)
include/trace/events/f2fs.h:1547: __entry->range_end = wbc->range_end;
include/trace/events/f2fs.h:1566: __entry->range_end,
include/trace/events/writeback.h:492: __field(long, range_end)
include/trace/events/writeback.h:505: __entry->range_end = (long)wbc->range_end;
include/trace/events/writeback.h:519: __entry->range_end,
mm/filemap.c:378: .range_end = end,
mm/page-writeback.c:2419: return wbc->range_end >> PAGE_SHIFT;
net/bridge/br_private.h:1626: const struct net_bridge_vlan *range_end);
net/bridge/br_private.h:1873: const struct net_bridge_vlan *range_end)
net/bridge/br_private.h:1888: const struct net_bridge_vlan *range_end);
net/bridge/br_private.h:1895: struct net_bridge_vlan *range_end,
net/bridge/br_vlan.c:1980: const struct net_bridge_vlan *range_end)
net/bridge/br_vlan.c:1982: return v_curr->vid - range_end->vid == 1 &&
net/bridge/br_vlan.c:1983: range_end->flags == v_curr->flags &&
net/bridge/br_vlan.c:1984: br_vlan_opts_eq_range(v_curr, range_end);
net/bridge/br_vlan.c:1992: struct net_bridge_vlan *v, *range_start = NULL, *range_end = NULL;
net/bridge/br_vlan.c:2047: range_end = v;
net/bridge/br_vlan.c:2052: if (br_vlan_global_opts_can_enter_range(v, range_end))
net/bridge/br_vlan.c:2055: range_end->vid,
net/bridge/br_vlan.c:2061: idx += range_end->vid - range_start->vid + 1;
net/bridge/br_vlan.c:2065: !br_vlan_can_enter_range(v, range_end)) {
net/bridge/br_vlan.c:2069: range_end->vid, range_start,
net/bridge/br_vlan.c:2075: idx += range_end->vid - range_start->vid + 1;
net/bridge/br_vlan.c:2080: range_end = v;
net/bridge/br_vlan.c:2091: range_end->vid, range_start))
net/bridge/br_vlan.c:2095: range_end->vid, range_start,
net/bridge/br_vlan.c:2264: struct net_bridge_vlan *range_start, *range_end;
net/bridge/br_vlan.c:2268: range_end = br_vlan_find(vg, vinfo->vid);
net/bridge/br_vlan.c:2271: range_end = range_start;
net/bridge/br_vlan.c:2274: err = br_vlan_process_options(br, p, range_start, range_end,
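
The bridge VLAN dump compresses consecutive VIDs into ranges: a VLAN may extend the current range only if its VID is exactly one past range_end and its flags and options match; otherwise the accumulated [range_start, range_end] is emitted and a new range begins. A stripped-down sketch of the coalescing loop, with the entry type and emit() callback as stand-ins:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    struct vlan { uint16_t vid; uint32_t flags; };

    /* Mirrors the br_vlan_can_enter_range() test above, minus the
     * per-option comparisons.
     */
    static bool can_enter_range(const struct vlan *v,
                                const struct vlan *range_end)
    {
        return v->vid - range_end->vid == 1 && v->flags == range_end->flags;
    }

    static void dump_ranges(const struct vlan *v, size_t n,
                            void (*emit)(uint16_t start, uint16_t end))
    {
        const struct vlan *range_start = NULL, *range_end = NULL;

        for (size_t i = 0; i < n; i++) {
            if (range_start && can_enter_range(&v[i], range_end)) {
                range_end = &v[i];    /* extend the current range */
                continue;
            }
            if (range_start)
                emit(range_start->vid, range_end->vid);
            range_start = range_end = &v[i];
        }
        if (range_start)
            emit(range_start->vid, range_end->vid);
    }
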
net/bridge/br_vlan_options.c:33: const struct net_bridge_vlan *range_end)
net/bridge/br_vlan_options.c:35: return (!v_curr->tinfo.tunnel_dst && !range_end->tinfo.tunnel_dst) ||
net/bridge/br_vlan_options.c:36: vlan_tunid_inrange(v_curr, range_end);
net/bridge/br_vlan_options.c:41: const struct net_bridge_vlan *range_end)
net/bridge/br_vlan_options.c:43: u8 range_mc_rtr = br_vlan_multicast_router(range_end);
net/bridge/br_vlan_options.c:46: if (v_curr->state != range_end->state)
net/bridge/br_vlan_options.c:49: if (!__vlan_tun_can_enter_range(v_curr, range_end))
net/bridge/br_vlan_options.c:56: if ((v_curr->priv_flags ^ range_end->priv_flags) &
net/bridge/br_vlan_options.c:64: &range_end->port_mcast_ctx))
net/bridge/br_vlan_options.c:286: struct net_bridge_vlan *range_end,
net/bridge/br_vlan_options.c:304: if (!range_end || !br_vlan_should_use(range_end)) {
net/bridge/br_vlan_options.c:310: for (vid = range_start->vid; vid <= range_end->vid; vid++) {
tools/testing/radix-tree/maple.c:34401: unsigned long range_end;
tools/testing/radix-tree/maple.c:34502: mas_for_each(&mas, entry, test->range_end) {
tools/testing/radix-tree/maple.c:34854: vals.range_end = ULONG_MAX;
tools/testing/radix-tree/maple.c:34878: vals.range_end = 5035;
tools/testing/radix-tree/maple.c:34917: vals.range_end = 5035;
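
In the maple-tree tests, range_end is simply the inclusive upper index handed to mas_for_each(): ULONG_MAX walks the whole tree, 5035 stops early. A kernel-context usage sketch (process() is a stand-in; the caller is assumed to hold the appropriate lock):

    #include <linux/maple_tree.h>

    static void walk_tree(struct maple_tree *mt, unsigned long range_end,
                          void (*process)(void *entry))
    {
        MA_STATE(mas, mt, 0, 0);
        void *entry;

        /* Stops after the last entry at index <= range_end; the mas
         * state can resume the walk later from where it left off.
         */
        mas_for_each(&mas, entry, range_end)
            process(entry);
    }
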
tools/testing/selftests/kvm/include/sparsebit.h:82: #define sparsebit_for_each_set_range(s, range_begin, range_end) \
tools/testing/selftests/kvm/include/sparsebit.h:84: range_end = sparsebit_next_clear(s, range_begin) - 1; \
tools/testing/selftests/kvm/include/sparsebit.h:85: range_begin && range_end; \
tools/testing/selftests/kvm/include/sparsebit.h:86: range_begin = sparsebit_next_set(s, range_end), \
tools/testing/selftests/kvm/include/sparsebit.h:87: range_end = sparsebit_next_clear(s, range_begin) - 1)
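
The selftest macro closes the listing with the inclusive flavor of a bit-range walk: range_end is derived as "next clear bit minus one", and iteration stops once either cursor falls off the set. A usage example against the sparsebit API from the same header:

    #include <stdio.h>
    #include "sparsebit.h"

    static void dump_set_ranges(struct sparsebit *s)
    {
        sparsebit_idx_t range_begin, range_end;

        /* Each iteration yields one maximal run of set bits. */
        sparsebit_for_each_set_range(s, range_begin, range_end)
            printf("set: [%llu, %llu]\n",
                   (unsigned long long)range_begin,
                   (unsigned long long)range_end);
    }
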