x86_stepping
&& boot_cpu_data.x86_stepping < 10) {
if (boot_cpu_data.x86_stepping < 9) {
if (boot_cpu_data.x86_stepping == 11)
if ((boot_cpu_data.x86_model == 0x0f && boot_cpu_data.x86_stepping >= 0x0e) ||
boot_cpu_data.x86_stepping < 0x0A)
unsigned int x86_stepping(unsigned int sig);
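The x86_stepping() helper declared above decodes the stepping field from a raw CPUID signature (the tfms value read from CPUID leaf 1 EAX; see the c->x86_stepping = x86_stepping(tfms) call further down and the EXPORT_SYMBOL_GPL(x86_stepping) line). A minimal standalone sketch of that decode, assuming the standard signature layout with the stepping in bits 3:0; an illustration only, not the kernel source verbatim:

/* Hedged sketch: stepping lives in CPUID.1:EAX bits [3:0]. */
#include <stdio.h>

static unsigned int x86_stepping(unsigned int sig)
{
        return sig & 0xf;               /* stepping: signature bits [3:0] */
}

int main(void)
{
        unsigned int sig = 0x000306c3;  /* example signature: family 6, model 0x3c, stepping 3 */

        printf("stepping = %u\n", x86_stepping(sig));   /* prints 3 */
        return 0;
}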
__u8 x86_stepping;
c->x86_stepping >= 0x0e)) {
boot_cpu_data.x86_stepping >= 0x1))
OFFSET(CPUINFO_x86_stepping, cpuinfo_x86, x86_stepping);
if (c->x86_model == 3 && c->x86_stepping == 0)
(c->x86_stepping == 0 || c->x86_stepping == 1))
if (c->x86_model == 6 && c->x86_stepping == 1) {
(c->x86_model == 8 && c->x86_stepping < 8)) {
if ((c->x86_model == 8 && c->x86_stepping > 7) ||
if ((c->x86_model == 8 && c->x86_stepping >= 1) || (c->x86_model > 8)) {
if ((c->x86_model == 6) && ((c->x86_stepping == 0) ||
(c->x86_stepping == 1)))
if ((c->x86_model == 7) && (c->x86_stepping == 0))
if (((c->x86_model == 6) && (c->x86_stepping >= 2)) ||
((c->x86_model == 7) && (c->x86_stepping >= 1)) ||
(c->x86_model == 8 && c->x86_stepping >= 8))
(c->x86_model == 0x41 && c->x86_stepping >= 0x2))
(c->x86_model == 0x2 && c->x86_stepping >= 0x1))
if (c->x86_model == 0x47 && c->x86_stepping == 0x0) {
switch (c->x86_stepping) {
(c->x86_stepping == 1) && (size == 65))
c->x86_model = c->x86_stepping = 0; /* So far unknown... */
if (c->x86_stepping || c->cpuid_level >= 0)
pr_cont(", stepping: 0x%x)\n", c->x86_stepping);
c->x86_stepping = x86_stepping(tfms);
c->x86_stepping = dir1 & 0xf;
c->x86_stepping == spectre_bad_microcodes[i].stepping)
if (c->x86_vfm == INTEL_ATOM_BONNELL && c->x86_stepping <= 2 &&
(c->x86_stepping == 0x3 || c->x86_stepping == 0x4))
boot_cpu_data.x86_stepping < 8) {
c->x86_stepping >= 1 && c->x86_stepping <= 4) {
if ((c->x86_vfm == INTEL_PENTIUM_II_KLAMATH && c->x86_stepping < 3) ||
if (c->x86_vfm == INTEL_P4_WILLAMETTE && c->x86_stepping == 1) {
(c->x86_stepping < 0x6 || c->x86_stepping == 0xb))
else if (c->x86_stepping == 0 || c->x86_stepping == 5)
!(BIT(c->x86_stepping) & m->steppings))
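The BIT(c->x86_stepping) & m->steppings test above matches the running CPU against a per-stepping bitmask carried in a match-table entry. A small self-contained sketch of that bit test; BIT() is re-declared locally and the "steppings 2 and 3 only" mask is a made-up example value, not taken from any real table:

#include <stdio.h>

#define BIT(n)  (1u << (n))

int main(void)
{
        unsigned int steppings_mask = BIT(2) | BIT(3);  /* hypothetical entry: match steppings 2 and 3 */
        unsigned int stepping;

        for (stepping = 0; stepping <= 4; stepping++)
                printf("stepping %u: %s\n", stepping,
                       (BIT(stepping) & steppings_mask) ? "match" : "no match");
        return 0;
}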
if ((c->x86 == 6 && c->x86_model == 0xf && c->x86_stepping >= 0xe) ||
if (s->cpu_minstepping && boot_cpu_data.x86_stepping < s->cpu_minstepping)
x86_family(eax), x86_model(eax), x86_stepping(eax));
c->x86_stepping == 0x01 &&
c->x86, c->x86_model, c->x86_stepping);
boot_cpu_data.x86_stepping <= 7) {
if (c->x86_stepping || c->cpuid_level >= 0)
seq_printf(m, "stepping\t: %d\n", c->x86_stepping);
if (boot_cpu_data.x86_stepping <= 4)
(boot_cpu_data.x86_model << 4) | boot_cpu_data.x86_stepping;
return x86_stepping(best->eax);
EXPORT_SYMBOL_GPL(x86_stepping);
if ((c->x86_model == 9) && (c->x86_stepping > 7))
(c->x86_stepping == 8)) {
switch (c->x86_stepping) {
if (c->x86_stepping < 8) {
switch (c->x86_stepping) {
cpuid = (c->x86 << 8) | (c->x86_model << 4) | c->x86_stepping;
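The line above packs family, model and stepping into one value using the same nibble layout as the low 12 bits of the CPUID signature (family in bits 11:8, model in 7:4, stepping in 3:0); the 4-bit fields truncate effective family/model values of 16 or more. A quick worked example with arbitrary values:

#include <stdio.h>

int main(void)
{
        unsigned int family = 6, model = 0xf, stepping = 2;     /* arbitrary example values */
        unsigned int cpuid = (family << 8) | (model << 4) | stepping;

        printf("packed cpuid = 0x%03x\n", cpuid);       /* 0x600 | 0xf0 | 0x2 = 0x6f2 */
        return 0;
}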
if ((c->x86_model == 6) && (c->x86_stepping == 0)) {
(c->x86_stepping == x->x86_stepping))
__u8 x86_stepping; /* stepping */
pr_debug("ebx value is %x, x86_stepping is %x\n", ebx, c->x86_stepping);
switch (c->x86_stepping) {
if (c->x86_stepping == 0x01) {
if (c->x86 == 6 && c->x86_model == 15 && c->x86_stepping == 2) {
pvt->stepping = boot_cpu_data.x86_stepping;
(boot_cpu_data.x86_stepping >= 1);
fam, x86_model(m->cpuid), x86_stepping(m->cpuid),
tm->stepping_mask == c->x86_stepping))
if (c->x86_vfm == INTEL_CORE2_MEROM && c->x86_stepping < 4)
if (c->x86_vfm == INTEL_CORE_YONAH && c->x86_stepping < 0xc && c->microcode < 0x39) {
vrm_ret = find_vrm(c->x86, c->x86_model, c->x86_stepping, c->x86_vendor);
(boot_cpu_data.x86_model == 4 && boot_cpu_data.x86_stepping <= 2);
stepping = boot_cpu_data.x86_stepping;
boot_cpu_data.x86_stepping, ifsd->cur_batch, test->image_suffix);
boot_cpu_data.x86_stepping, ifsd->cur_batch, test->image_suffix);
if (cpu_data(0).x86_stepping == 1) {