memchr_inv
if (memchr_inv(base, 0xff, NVRAM_SIZE)) {
if (memchr_inv(base, 0xff, NVRAM_SIZE)) {
return !memchr_inv((void *)page, PAGE_UNUSED, PMD_SIZE);
if (!memchr_inv(mdesc->vmpck, 0, VMPCK_KEY_LEN)) {
if (!mdesc->vmpck || !memchr_inv(mdesc->vmpck, 0, VMPCK_KEY_LEN)) {
pcmd_page_empty = !memchr_inv(pcmd_page, 0, PAGE_SIZE);
if (memchr_inv(pcmd_page, 0, PAGE_SIZE))
if (memchr_inv(secinfo->reserved, 0, sizeof(secinfo->reserved)))
if (memchr_inv(hdr->reserved, 0, sizeof(hdr->reserved)))
if (memchr_inv(init_vm->reserved, 0, sizeof(init_vm->reserved))) {
return !memchr_inv((void *)start, PAGE_UNUSED, PMD_SIZE);
if (memchr_inv(arg.reserved, 0, sizeof(arg.reserved)))
if (memchr_inv(arg.reserved, 0, sizeof(arg.reserved)))
if (memchr_inv(arg.reserved, 0, sizeof(arg.reserved)))
return !memchr_inv(page_address(p) + offset, 0, len);
if (memchr_inv(params->p, 0, params->p_size) == NULL)
return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
memchr_inv(sig + vecs->c_size, 0, sig_size - vecs->c_size)) {
memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
if (memchr_inv(bvec_virt(&bv), 0, bv.bv_len))
pcr_select_offset = memchr_inv(pcr_selection.pcr_select, 0,
return memchr_inv(&pll_state->pll[C10_PLL_SSC_REG_START_IDX],
if (memchr_inv(vaddr, *out, PAGE_SIZE)) {
if (memchr_inv(obj_map, pattern[i], PAGE_SIZE)) {
if (memchr_inv(dma_map, pattern[i], PAGE_SIZE)) {
if (memchr_inv((void __force *)map, POISON_FREE, obj->base.size)) {
if (memchr_inv(vaddr, POISON_FREE, obj->base.size)) {
if (memchr_inv(vaddr, CONTEXT_REDZONE, I915_GTT_PAGE_SIZE))
if (memchr_inv(vaddr, POISON_INUSE, I915_GTT_PAGE_SIZE)) {
if (!memchr_inv(history, act, sizeof(history)))
if (memchr_inv(result, value, sizeof(result))) {
if (memchr_inv(scratch, POISON_FREE, PAGE_SIZE)) {
memchr_inv(&((STRUCT).MEMBER), \
if (!memchr_inv(data, 0x00, 13) && !(data[13] & BIT(7)))
if (!memchr_inv(data, 0x00, 16) && !FIELD_GET(GENMASK(5, 0), data[16]))
if (memchr_inv(kern_spec_filter +
return !memchr_inv((const void *)&uattr->data + len,
!memchr_inv(reserved, 0, sizeof(reserved))
memchr_inv((void *)&filter.field +\
if (memchr_inv(ucmd.reserved, 0, sizeof(ucmd.reserved)))
memchr_inv((void *)&filter.field + sizeof(filter.field), 0, \
!(memchr_inv(MLX5_ADDR_OF(fte_match_param, match_criteria, headers), \
memchr_inv(&ucmd.burst_info.reserved, 0,
if (memchr_inv(priv->dev->dev_addr, 0, INFINIBAND_ALEN))
if (!memchr_inv(keyboard_protocol->keys_pressed, 1, MAX_ROLLOVER))
if (!memchr_inv(skb->data, 0xff, l)) {
if (!memchr_inv(skb->data, 0x2a, l)) {
if (memchr_inv(ic->sb, 0, SB_SECTORS << SECTOR_SHIFT)) {
bad = memchr_inv(read_buf, pattern, chunk);
if (memchr_inv(data_buf, 0xff, data_len)) {
return memchr_inv(key, 0, sizeof(*key)) ? true : false;
if (!memchr_inv(labels, 0, sizeof(u32) * 4)) {
if (memchr_inv(&enc_ports.mask->dst, 0xff,
if (memchr_inv(&enc_opts.mask->data, 0, sizeof(enc_opts.mask->data)) &&
!memchr_inv(option_mask->opt_data, 0, option_mask->length * 4))
if (!memchr_inv(option_key->opt_data, 0, option_key->length * 4)) {
if (memchr_inv(&enc_opts.mask->data, 0, sizeof(enc_opts.mask->data)) &&
memchr_inv(opt->opt_data, 0, opt->length * 4)) {
if (!memchr_inv(misc2, 0, MLX5_ST_SZ_BYTES(fte_match_set_misc2)))
if (!memchr_inv(mask_value, 0, len)) /* If mask is zero */
if (memchr_inv(&req->fw_handle, 0, sizeof(req->fw_handle)) &&
if (memchr_inv(ct.key, 0, sizeof(*ct.key)))
if (memchr_inv(&ipv6_addrs.mask->dst, 0xff,
if (memchr_inv(fm.mask->ct_labels, 0, sizeof(fm.mask->ct_labels))) {
memchr_inv(match->mask.eth_saddr, 0, ETH_ALEN) ||
memchr_inv(match->mask.eth_daddr, 0, ETH_ALEN) ||
return !memchr_inv(addr, 0xff, sizeof(*addr));
if (memchr_inv(fa->ct_metadata.labels, 0, sizeof(fa->ct_metadata.labels))) {
memchr_inv(&info->key.u, 0, sizeof(info->key.u)));
if (memchr_inv(buf + i, '\0', min(block_size - 1, len - i)))
fmts_ptr = memchr_inv(fmts_ptr, 0, fmts_end_ptr - fmts_ptr);
le32_encode_bits(!!memchr_inv(gtk_info->txmickey, 0,
!memchr_inv(ids->eui64, 0, sizeof(ids->eui64)))
!memchr_inv(ids->nguid, 0, sizeof(ids->nguid)))
if (memchr_inv(ids->nguid, 0, sizeof(ids->nguid))) {
if (memchr_inv(ids->eui64, 0, sizeof(ids->eui64))) {
bool has_nguid = memchr_inv(ids->nguid, 0, sizeof(ids->nguid));
bool has_eui64 = memchr_inv(ids->eui64, 0, sizeof(ids->eui64));
if (memchr_inv(ids->nguid, 0, sizeof(ids->nguid)))
if (memchr_inv(ids->eui64, 0, sizeof(ids->eui64)))
!memchr_inv(ids->nguid, 0, sizeof(ids->nguid)))
if (!memchr_inv(ids->nguid, 0, sizeof(ids->nguid)))
if (!memchr_inv(ids->eui64, 0, sizeof(ids->eui64)))
if (memchr_inv(&req->ns->uuid, 0, sizeof(req->ns->uuid))) {
if (memchr_inv(req->ns->nguid, 0, sizeof(req->ns->nguid))) {
return memchr_inv(dev->policy_buf, 0xff, dev->policy_sz);
if (memchr_inv(data.reserved, 0, sizeof(data.reserved))) {
if (memchr_inv(ioctl->reserved14, 0, sizeof(ioctl->reserved14)))
if (!memchr_inv(vport->lpfc_vmid_host_uuid, 0,
if (!memchr_inv(vport->lpfc_vmid_host_uuid, 0,
not_zero = memchr_inv(buf, 0x00, cmd->data_length);
if (memchr_inv(loi->reserved, 0, sizeof(loi->reserved))) {
if (memchr_inv(args.reserved, 0, sizeof(args.reserved)))
if (memchr_inv(data->args.reserved, 0, sizeof(data->args.reserved)))
return memchr_inv(tail_data, 0, PAGE_SIZE - partial) ? true : false;
if (memchr_inv(arg.__reserved, 0, sizeof(arg.__reserved)))
if (memchr_inv(arg.__reserved, 0, sizeof(arg.__reserved)))
if (memchr_inv(arg.__reserved, 0, sizeof(arg.__reserved)))
if (memchr_inv(policy->__reserved, 0, sizeof(policy->__reserved))) {
padend = memchr_inv(padbuf, 0, padbufsize);
if (memchr_inv(p_boot->must_be_zero, 0, sizeof(p_boot->must_be_zero)))
ret = memchr_inv(buf, c, size);
if (memchr_inv(head.fmh_reserved, 0, sizeof(head.fmh_reserved)) ||
memchr_inv(head.fmh_keys[0].fmr_reserved, 0,
memchr_inv(head.fmh_keys[1].fmr_reserved, 0,
return !memchr_inv(lvb + JID_BITMAP_OFFSET, 0,
ptr = memchr_inv(start, 0, bytes);
memchr_inv((void *)fid + NSFS_FID_SIZE_U32_LATEST, 0,
return !memchr_inv(hmac, 0, c->hmac_desc_len);
memchr_inv(arg.__reserved2, 0, sizeof(arg.__reserved2)))
if (memchr_inv(desc->__reserved, 0, sizeof(desc->__reserved))) {
if (memchr_inv(dblock->data, 0, params->block_size)) {
if (memchr_inv(rsb + 1, 0, BBTOB(bp->b_length) - sizeof(*rsb)))
if (memchr_inv(sbp->sb_pad, 0, sizeof(sbp->sb_pad))) {
if (memchr_inv(&sb->sb_features_compat, 0,
if (memchr_inv(sb->sb_pad, 0, sizeof(sb->sb_pad)))
if (memchr_inv((char *)sb + sblen, 0, BBTOB(bp->b_length) - sblen))
if (memchr_inv(sm->sm_reserved, 0, sizeof(sm->sm_reserved)))
return !memchr_inv(ptr, 0, array->obj_size);
if (memchr_inv(&args.pad, 0, sizeof(args.pad)))
if (memchr_inv(head.fmh_reserved, 0, sizeof(head.fmh_reserved)) ||
memchr_inv(head.fmh_keys[0].fmr_reserved, 0,
memchr_inv(head.fmh_keys[1].fmr_reserved, 0,
if (memchr_inv(&hmo->pad, 0, sizeof(hmo->pad)))
if (memchr_inv(&src->pad32, 0, sizeof(src->pad32)) ||
memchr_inv(src->pad64, 0, sizeof(src->pad64)))
memchr_inv(hdr->reserved, 0, sizeof(hdr->reserved)))
if (memchr_inv(&ageo.ag_reserved, 0, sizeof(ageo.ag_reserved)))
if (memchr_inv(&rgeo.rg_reserved, 0, sizeof(rgeo.rg_reserved)))
if (memchr_inv(super->s_reserved, 0, sizeof(super->s_reserved))) {
macro(memchr_inv), \
void *__real_memchr_inv(const void *s, int c, size_t n) __RENAME(memchr_inv);
__FORTIFY_INLINE void *memchr_inv(const void * const POS0 p, int c, size_t size)
return memchr_inv(src.kernel + offset, 0, size) == NULL;
void *memchr_inv(const void *s, int c, size_t n);
return !memchr_inv(s, 0, n);
memchr_inv(src + size, 0, rest) != NULL;
ret = !memchr_inv(buf, 0, len);
if (memchr_inv(&reg->__resv, 0, sizeof(reg->__resv)))
if (memchr_inv(&reg.__resv, 0, sizeof(reg.__resv)))
if (memchr_inv(p, 0, size))
if (memchr_inv(&reg.__resv, 0, sizeof(reg.__resv)))
if (memchr_inv(buf.pad, 0, sizeof(buf.pad)))
memchr_inv(data + prev_mend, 0, moff - prev_mend))
memchr_inv(data + prev_mend, 0, t->size - prev_mend))
if (memchr_inv(udata + moff, 0, msize)) {
res = memchr_inv(uaddr.kernel + expected_size, 0,
memchr_inv((void *) &attr->CMD##_LAST_FIELD + \
EXPORT_SYMBOL(memchr_inv);
memchr_inv(small, 0x7A, sizeof(small) + 1)
p = memchr_inv(&r[1], 0, sizeof(r) - sizeof(r[0]));
KUNIT_ASSERT_PTR_EQ(test, memchr_inv(haystack, needle, len),
KUNIT_ASSERT_PTR_EQ(test, memchr_inv(haystack, needle, len + 1),
KUNIT_ASSERT_PTR_EQ(test, memchr_inv(haystack, needle, len * 2),
if (memchr_inv(alloced_buffer, FILL_CHAR, PAD_SIZE)) {
if (memchr_inv(test_buffer, FILL_CHAR, BUF_SIZE + PAD_SIZE)) {
if (memchr_inv(test_buffer + written + 1, FILL_CHAR, bufsize - (written + 1))) {
if (memchr_inv(test_buffer + bufsize, FILL_CHAR, BUF_SIZE + PAD_SIZE - bufsize)) {
return memchr_inv(from, 0x0, size) == NULL;
start = memchr_inv(mem, PAGE_POISON, bytes);
fault = memchr_inv(kasan_reset_tag(start), value, bytes);
fault = memchr_inv(kasan_reset_tag(pad), POISON_INUSE, remainder);
VM_WARN_ON_ONCE(walk && memchr_inv(walk->nr_pages, 0, sizeof(walk->nr_pages)));
VM_WARN_ON_ONCE(walk && memchr_inv(walk->mm_stats, 0, sizeof(walk->mm_stats)));
VM_WARN_ON_ONCE(memchr_inv(lruvec->lrugen.nr_pages, 0,
if (memchr_inv(pzstats->vm_stat_diff, 0, sizeof(pzstats->vm_stat_diff)))
if (memchr_inv(n->vm_node_stat_diff, 0, sizeof(n->vm_node_stat_diff)))
return !memchr_inv((u8 *)buf + from, 0, to - from);
if (addr_cnt == 1 && !memchr_inv(&tmp_addr, 0, sizeof(tmp_addr)))
if (a[0] < 31 && !memchr_inv(a, a[0], ETH_ALEN))
if (a[0] < 31 && !memchr_inv(a, a[0], ETH_ALEN))
if (!memchr_inv(&rx, 0xff, sizeof(rx)))
if (!memchr_inv(&tx, 0xff, sizeof(tx)))
if (!memchr_inv(&rx_sum, 0xff, sizeof(rx_sum)) &&
!memchr_inv(&tx_sum, 0xff, sizeof(tx_sum)))
if (memchr_inv(dev->perm_addr, '\0', dev->addr_len) &&
if (!memchr_inv(map + start_word, '\0',
!memchr_inv(ethtool_rxfh_context_key(ctx), 0,
!memchr_inv(ethtool_rxfh_context_key(ctx), 0,
if (memchr_inv(&ipencap, 0, sizeof(ipencap))) {
return (!memchr_inv(nft_set_ext_key(&rbe->ext), 0, set->klen) &&
if (mask && !memchr_inv(mask, 0, len))
if (!memchr_inv(labels_m, 0, labels_sz))
memchr_inv(((char *)mask) + FL_KEY_MEMBER_OFFSET(member), \
if (!memchr_inv(mask, 0, len))
if (!memchr_inv(vlan_mask, 0, sizeof(*vlan_mask)))
if (!memchr_inv(&flags_mask, 0, sizeof(flags_mask)))
if (!memchr_inv(mask, 0, sizeof(*mask)))
if (memchr_inv(kdfcopy->__spare, 0, sizeof(kdfcopy->__spare))) {
return !memchr_inv(&masks->access, 0, sizeof(masks->access));
extern void *memchr_inv(const void *start, int c, size_t bytes);
return bid && bid->size ? !!memchr_inv(bid->data, 0, bid->size) : false;
!memchr_inv(&dso_bid->data[bid->size], 0,