cpu_asid_mask
asid = read_csr_asid() & cpu_asid_mask(&current_cpu_data);
return ~(u64)(cpu_asid_mask(&cpu_data[cpu]));
return cpu_asid_mask(&cpu_data[cpu]) + 1;
#define cpu_asid(cpu, mm) (cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu]))
if (!((++asid) & cpu_asid_mask(&cpu_data[cpu])))
unsigned long asidmask = cpu_asid_mask(&current_cpu_data);
#define KVM_ENTRYHI_ASID cpu_asid_mask(&boot_cpu_data)
(cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu]))
write_c0_memorymapid(ctx & cpu_asid_mask(&cpu_data[cpu]));
unsigned long asid_mask = cpu_asid_mask(&cpu_data[cpu]);
WARN_ON(asid_mask != cpu_asid_mask(c));
unsigned long asidmask = cpu_asid_mask(&current_cpu_data);
asid_mask = cpu_asid_mask(&current_cpu_data);
mmid_mask = cpu_asid_mask(&boot_cpu_data);
write_c0_memorymapid(ctx & cpu_asid_mask(&boot_cpu_data));
if (!((asid += cpu_asid_inc()) & cpu_asid_mask(&cpu_data[cpu]))) {
__set_bit(mmid & cpu_asid_mask(&cpu_data[cpu]), mmid_map);
unsigned long asid_mask = cpu_asid_mask(&current_cpu_data);
unsigned long asid_mask = cpu_asid_mask(&current_cpu_data);
unsigned long asid_mask = cpu_asid_mask(&current_cpu_data);
old_ctx = read_c0_entryhi() & cpu_asid_mask(&current_cpu_data);
unsigned long asid_mask = cpu_asid_mask(&current_cpu_data);
pid = read_c0_entryhi() & cpu_asid_mask(&current_cpu_data);
asid = entryhi & cpu_asid_mask(&current_cpu_data);
if (asid > cpu_asid_mask(&current_cpu_data)) {
if (asid > cpu_asid_mask(&current_cpu_data)) {