check_mul_overflow
if (check_mul_overflow(mcnt, freq_ratio, &mcnt) || !mcnt)
if (unlikely(check_mul_overflow(count, size, &bytes)))
check_mul_overflow((u64)U32_MAX * 128, bus_cycle_ns, &unused))
if (check_mul_overflow(nr_hd, nr_sect, &cylblk)) {
if (check_mul_overflow(cylblk, blksize, &cylblk)) {
if (check_mul_overflow(blk, (sector_t) blksize, &blk)) {
if (check_mul_overflow((u32)(get_dbc_req_elem_size() + get_dbc_rsp_elem_size()),
if (check_mul_overflow((unsigned long)args->hdr.count,
if (check_mul_overflow((u64)sizeof(*image_out_table), table_nents, &mul_bytes)) {
if (check_mul_overflow(sizeof(*swnodes->ports) +
if (check_mul_overflow((size_t)properties->package.count,
if (check_mul_overflow(args->height, pitch, &size))
if (check_mul_overflow(mode->clock, num, &num))
if (check_mul_overflow(mode->htotal * mode->vtotal, den, &den))
if (unlikely(check_mul_overflow(nmemb, size, &bytes)))
if (check_mul_overflow(width, 4u, &stride))
if (check_mul_overflow(stride, height, &size))
if (check_mul_overflow(count, size, &copy_len))
if (check_mul_overflow((size_t)args->gather_data_words, (size_t)4, &copy_len)) {
if (check_mul_overflow(args.page_count, sizeof(*states), &states_buf_sz))
if (check_mul_overflow(check, PAGE_SIZE, &check))
if (check_mul_overflow(power * temp, MICRO, &temp_2)) {
if (check_mul_overflow(DECA * st->vfs_out * 40 * BIT(8), energy, &temp)) {
if (check_mul_overflow(temp, val, &temp_2)) {
if (!check_mul_overflow(*val, rescale->numerator, &_val) &&
!check_mul_overflow(*val2, rescale->denominator, &_val2)) {
if (unlikely(check_mul_overflow(gts->num_hwgain, 2 * sizeof(int),
if (check_mul_overflow(gain, mul, &res))
if (check_mul_overflow(helper, coeff, &helper))
if (check_mul_overflow(helper, gain0, &tmp))
if (!check_mul_overflow(coeff, ch, &helper))
if (check_mul_overflow(factor, max_rdma_ctxs, &result))
!check_mul_overflow(io_min, sc->stripes, &io_opt)) {
if (!check_mul_overflow(max_hw_discard_sectors, sc->stripes, &max_hw_discard_sectors)) {
if (unlikely(check_mul_overflow((size_t)src->num_routes,
if (check_mul_overflow(blksz, blocks, &len))
if (WARN_ON_ONCE(check_mul_overflow(wq_size, (u32)rq->mpwqe.mtts_per_wqe,
if (check_mul_overflow((int)item_size, (int)num_items, &inlen)) {
if (unlikely(check_mul_overflow(num_of_dests, 2u, &ref_act_cnt)))
if (check_mul_overflow(pspval.intval, 100,
if (check_mul_overflow(pspval.intval, 1000,
if (check_mul_overflow(LPFC_RAS_MIN_BUFF_POST_SIZE,
if (check_mul_overflow(num_keys, 8, &data_len) ||
if (WARN_ON(check_mul_overflow(npage, PAGE_SIZE, &iova_size) ||
if (check_mul_overflow(npage, PAGE_SIZE, &iova_size) ||
if (check_mul_overflow(h, pitch, &size) ||
check_mul_overflow(size, charcount, &size))
if (check_mul_overflow(var->xres, var->yres, &unused) ||
check_mul_overflow(var->xres_virtual, var->yres_virtual, &unused))
if (check_mul_overflow((u64)alarm_time_sec, (u64)NSEC_PER_SEC,
ret = check_mul_overflow(100, x, &x);
if (check_mul_overflow(block, (sector_t)size, &pos) || pos > MAX_LFS_FILESIZE) {
check_mul_overflow(ptr_size, sizeof(void *), &ptr_size))
if (unlikely(check_mul_overflow(*seq_size, 2, seq_size)))
if (check_mul_overflow((size_t)fattr->cf_acls->a_count,
if (check_mul_overflow((size_t)fattr->cf_dacls->a_count,
if (check_mul_overflow(req->deadtime, SMB_ECHO_INTERVAL,
if (check_mul_overflow(blockcount, per_intent, &logres) ||
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (unlikely(check_mul_overflow(new_n, new_size, &bytes)))
if (check_mul_overflow(factor1, factor2, &bytes))
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (unlikely(check_mul_overflow(new_n, new_size, &bytes)))
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (check_mul_overflow(n, size, &nbytes))
if (check_mul_overflow(n, size, &nbytes))
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (check_mul_overflow(sec, MSEC_PER_SEC, &root_wait)) {
if (check_mul_overflow((unsigned long)p->len, (unsigned long)p->nbufs,
if (check_mul_overflow(attr->max_entries, nr_hash_funcs, &nr_bits) ||
check_mul_overflow(nr_bits / 5, (u32)7, &nr_bits) ||
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (check_mul_overflow(*dst_umax, src_reg->u32_max_value, dst_umax) ||
check_mul_overflow(*dst_umin, src_reg->u32_min_value, dst_umin)) {
if (check_mul_overflow(*dst_smin, src_reg->s32_min_value, &tmp_prod[0]) ||
check_mul_overflow(*dst_smin, src_reg->s32_max_value, &tmp_prod[1]) ||
check_mul_overflow(*dst_smax, src_reg->s32_min_value, &tmp_prod[2]) ||
check_mul_overflow(*dst_smax, src_reg->s32_max_value, &tmp_prod[3])) {
if (check_mul_overflow(*dst_umax, src_reg->umax_value, dst_umax) ||
check_mul_overflow(*dst_umin, src_reg->umin_value, dst_umin)) {
if (check_mul_overflow(*dst_smin, src_reg->smin_value, &tmp_prod[0]) ||
check_mul_overflow(*dst_smin, src_reg->smax_value, &tmp_prod[1]) ||
check_mul_overflow(*dst_smax, src_reg->smin_value, &tmp_prod[2]) ||
check_mul_overflow(*dst_smax, src_reg->smax_value, &tmp_prod[3])) {
if (unlikely(check_mul_overflow(n, size, &bytes)))
if (check_mul_overflow(unit, NSEC_PER_SEC, &priv->nsecs))
if (check_mul_overflow(tmp, priv->burst, &tokens))
if (check_mul_overflow(priv->nsecs, rate_with_burst, &tmp))
if (check_mul_overflow(ret, bsize, &ret))
if (unlikely(check_mul_overflow(bytes, num, &len)))
if (check_mul_overflow(rate, bclk_ratio, &tmp))
if (check_mul_overflow(a, b, &bytes))
if (check_mul_overflow(a, b, &bytes))
if (check_mul_overflow(bytes, c, &bytes))
if (check_mul_overflow(n, size, &bytes))
if (check_mul_overflow(factor1, factor2, &bytes))
if (unlikely(check_mul_overflow(nmemb, size, &bytes)))
if (check_mul_overflow(new_sz, (size_t)2, &new_sz))
if (unlikely(check_mul_overflow(new_n, new_size, &bytes)))