__kaslr_offset
/*
 * Virtual KASLR displacement of the kernel image.
 * Declared here; defined (and randomized) by the arch boot/startup code.
 */
extern unsigned long __kaslr_offset;
return __kaslr_offset;
update_kaslr_offset(&__kaslr_offset, offset);
if (__kaslr_offset > 0) {
pr_cont("Kernel relocated by 0x%p\n", (void *)__kaslr_offset);
/*
 * Virtual offset the kernel image was relocated by at boot.
 * __ro_after_init: written once during early boot, read-only afterwards.
 * Exported so modules can translate kernel-image addresses.
 */
unsigned long __kaslr_offset __ro_after_init;
EXPORT_SYMBOL(__kaslr_offset);
/*
 * Translate between the kernel image's physical load address range and its
 * randomized virtual address range:
 *   __kernel_va(x): physical address -> virtual address (returns void *)
 *   __kernel_pa(x): virtual address  -> physical address (returns unsigned long)
 * Both subtract one base and add the other, so they are only valid after
 * __kaslr_offset (virtual base) and __kaslr_offset_phys (physical base)
 * have been established by the boot code.
 */
#define __kernel_va(x) ((void *)((unsigned long)(x) - __kaslr_offset_phys + __kaslr_offset))
#define __kernel_pa(x) ((unsigned long)(x) - __kaslr_offset + __kaslr_offset_phys)
boot_emerg("Kernel random base: %lx\n", __kaslr_offset);
__kaslr_offset = kernel_start;
boot_debug("__kaslr_offset: 0x%016lx\n", __kaslr_offset);
kaslr_large_page_offset = __kaslr_offset & ~_SEGMENT_MASK;
__kaslr_offset, __kaslr_offset_phys);
kaslr_adjust_got(__kaslr_offset);
setup_vmem(__kaslr_offset, __kaslr_offset + kernel_size, asce_limit);
psw.addr = __kaslr_offset + vmlinux.entry;
return __kaslr_offset;
if (x < __kaslr_offset)
return x - __kaslr_offset + __kaslr_offset_phys;