Lines Matching refs:i
476 static i386_insn i; variable
1759 return !((i.types[j].bitfield.byte in match_reg_size()
1761 || (i.types[j].bitfield.word in match_reg_size()
1763 || (i.types[j].bitfield.dword in match_reg_size()
1765 || (i.types[j].bitfield.qword in match_reg_size()
1776 && !((i.types[j].bitfield.unspecified in match_mem_size()
1778 || (i.types[j].bitfield.fword in match_mem_size()
1780 || (i.types[j].bitfield.tbyte in match_mem_size()
1782 || (i.types[j].bitfield.xmmword in match_mem_size()
1784 || (i.types[j].bitfield.ymmword in match_mem_size()
1786 || (i.types[j].bitfield.zmmword in match_mem_size()
1807 for (j = 0; j < i.operands; j++) in operand_size_match()
1818 if (i.types[j].bitfield.mem && !match_mem_size (t, j)) in operand_size_match()
1830 i.error = operand_size_mismatch; in operand_size_match()
1835 gas_assert (i.operands == 2); in operand_size_match()
1844 if (i.types[j].bitfield.mem in operand_size_match()
1877 i.error = operand_type_mismatch; in operand_type_match()
1927 i.error = register_type_mismatch; in operand_type_register_match()
2003 int shift = i.memshift; in fits_in_vec_disp8()
2098 || i.prefix[ADDR_PREFIX]) in offset_in_range()
2139 if ((i.prefix[REX_PREFIX] & prefix & REX_W) in add_prefix()
2140 || ((i.prefix[REX_PREFIX] & (REX_R | REX_X | REX_B)) in add_prefix()
2184 if (i.prefix[q] != 0) in add_prefix()
2190 if (!i.prefix[q]) in add_prefix()
2191 ++i.prefixes; in add_prefix()
2192 i.prefix[q] |= prefix; in add_prefix()
3066 if (i.vex.register_specifier) in build_vex_prefix()
3069 ~register_number (i.vex.register_specifier) & 0xf; in build_vex_prefix()
3070 gas_assert ((i.vex.register_specifier->reg_flags & RegVRex) == 0); in build_vex_prefix()
3077 if (!i.swap_operand in build_vex_prefix()
3078 && i.operands == i.reg_operands in build_vex_prefix()
3079 && i.tm.opcode_modifier.vexopcode == VEX0F in build_vex_prefix()
3080 && i.tm.opcode_modifier.s in build_vex_prefix()
3081 && i.rex == REX_B) in build_vex_prefix()
3083 unsigned int xchg = i.operands - 1; in build_vex_prefix()
3087 temp_type = i.types[xchg]; in build_vex_prefix()
3088 i.types[xchg] = i.types[0]; in build_vex_prefix()
3089 i.types[0] = temp_type; in build_vex_prefix()
3090 temp_op = i.op[xchg]; in build_vex_prefix()
3091 i.op[xchg] = i.op[0]; in build_vex_prefix()
3092 i.op[0] = temp_op; in build_vex_prefix()
3094 gas_assert (i.rm.mode == 3); in build_vex_prefix()
3096 i.rex = REX_R; in build_vex_prefix()
3097 xchg = i.rm.regmem; in build_vex_prefix()
3098 i.rm.regmem = i.rm.reg; in build_vex_prefix()
3099 i.rm.reg = xchg; in build_vex_prefix()
3102 i.tm = t[1]; in build_vex_prefix()
3105 if (i.tm.opcode_modifier.vex == VEXScalar) in build_vex_prefix()
3108 vector_length = i.tm.opcode_modifier.vex == VEX256 ? 1 : 0; in build_vex_prefix()
3110 switch ((i.tm.base_opcode >> 8) & 0xff) in build_vex_prefix()
3129 if (i.tm.opcode_modifier.vexopcode == VEX0F in build_vex_prefix()
3130 && i.tm.opcode_modifier.vexw != VEXW1 in build_vex_prefix()
3131 && (i.rex & (REX_W | REX_X | REX_B)) == 0) in build_vex_prefix()
3136 i.vex.length = 2; in build_vex_prefix()
3137 i.vex.bytes[0] = 0xc5; in build_vex_prefix()
3140 r = (i.rex & REX_R) ? 0 : 1; in build_vex_prefix()
3141 i.vex.bytes[1] = (r << 7 in build_vex_prefix()
3151 i.vex.length = 3; in build_vex_prefix()
3153 switch (i.tm.opcode_modifier.vexopcode) in build_vex_prefix()
3157 i.vex.bytes[0] = 0xc4; in build_vex_prefix()
3161 i.vex.bytes[0] = 0xc4; in build_vex_prefix()
3165 i.vex.bytes[0] = 0xc4; in build_vex_prefix()
3169 i.vex.bytes[0] = 0x8f; in build_vex_prefix()
3173 i.vex.bytes[0] = 0x8f; in build_vex_prefix()
3177 i.vex.bytes[0] = 0x8f; in build_vex_prefix()
3185 i.vex.bytes[1] = (~i.rex & 0x7) << 5 | m; in build_vex_prefix()
3188 w = (i.rex & REX_W) ? 1 : 0; in build_vex_prefix()
3189 if (i.tm.opcode_modifier.vexw == VEXW1) in build_vex_prefix()
3192 i.vex.bytes[2] = (w << 7 in build_vex_prefix()
3210 if (i.vex.register_specifier) in build_evex_prefix()
3212 gas_assert ((i.vrex & REX_X) == 0); in build_evex_prefix()
3214 register_specifier = i.vex.register_specifier->reg_num; in build_evex_prefix()
3215 if ((i.vex.register_specifier->reg_flags & RegRex)) in build_evex_prefix()
3219 if (!(i.vex.register_specifier->reg_flags & RegVRex)) in build_evex_prefix()
3220 i.vex.bytes[3] = 0x8; in build_evex_prefix()
3229 if (!(i.vrex & REX_X)) in build_evex_prefix()
3230 i.vex.bytes[3] = 0x8; in build_evex_prefix()
3235 switch ((i.tm.base_opcode >> 8) & 0xff) in build_evex_prefix()
3254 i.vex.length = 4; in build_evex_prefix()
3255 i.vex.bytes[0] = 0x62; in build_evex_prefix()
3258 switch (i.tm.opcode_modifier.vexopcode) in build_evex_prefix()
3276 i.vex.bytes[1] = (~i.rex & 0x7) << 5 | m; in build_evex_prefix()
3280 if (!(i.vrex & REX_R)) in build_evex_prefix()
3281 i.vex.bytes[1] |= 0x10; in build_evex_prefix()
3285 if ((i.reg_operands + i.imm_operands) == i.operands) in build_evex_prefix()
3291 if ((i.vrex & REX_B)) in build_evex_prefix()
3294 i.vex.bytes[1] &= ~0x40; in build_evex_prefix()
3299 i.vrex &= ~vrex_used; in build_evex_prefix()
3300 gas_assert (i.vrex == 0); in build_evex_prefix()
3303 w = (i.rex & REX_W) ? 1 : 0; in build_evex_prefix()
3304 if (i.tm.opcode_modifier.vexw) in build_evex_prefix()
3306 if (i.tm.opcode_modifier.vexw == VEXW1) in build_evex_prefix()
3320 i.vex.bytes[2] = (w << 7 | register_specifier << 3 | implied_prefix); in build_evex_prefix()
3324 if (i.mask && i.mask->zeroing) in build_evex_prefix()
3325 i.vex.bytes[3] |= 0x80; in build_evex_prefix()
3328 if (!i.rounding) in build_evex_prefix()
3333 switch (i.tm.opcode_modifier.evex) in build_evex_prefix()
3351 i.vex.bytes[3] |= vec_length; in build_evex_prefix()
3353 if (i.broadcast) in build_evex_prefix()
3354 i.vex.bytes[3] |= 0x10; in build_evex_prefix()
3358 if (i.rounding->type != saeonly) in build_evex_prefix()
3359 i.vex.bytes[3] |= 0x10 | (i.rounding->type << 5); in build_evex_prefix()
3361 i.vex.bytes[3] |= 0x10 | (evexrcig << 5); in build_evex_prefix()
3364 if (i.mask && i.mask->mask) in build_evex_prefix()
3365 i.vex.bytes[3] |= i.mask->mask->reg_num; in build_evex_prefix()
3373 if ((i.tm.cpu_flags.bitfield.cpusse3 || i.tm.cpu_flags.bitfield.cpusvme) in process_immext()
3374 && i.operands > 0) in process_immext()
3382 for (x = 0; x < i.operands; x++) in process_immext()
3383 if (register_number (i.op[x].regs) != x) in process_immext()
3385 register_prefix, i.op[x].regs->reg_name, x + 1, in process_immext()
3386 i.tm.name); in process_immext()
3388 i.operands = 0; in process_immext()
3399 gas_assert (i.imm_operands <= 1 in process_immext()
3400 && (i.operands <= 2 in process_immext()
3401 || ((i.tm.opcode_modifier.vex in process_immext()
3402 || i.tm.opcode_modifier.evex) in process_immext()
3403 && i.operands <= 4))); in process_immext()
3405 exp = &im_expressions[i.imm_operands++]; in process_immext()
3406 i.op[i.operands].imms = exp; in process_immext()
3407 i.types[i.operands] = imm8; in process_immext()
3408 i.operands++; in process_immext()
3410 exp->X_add_number = i.tm.extension_opcode; in process_immext()
3411 i.tm.extension_opcode = None; in process_immext()
3418 switch (i.tm.opcode_modifier.hleprefixok) in check_hle()
3424 i.tm.name, i.hle_prefix); in check_hle()
3427 if (i.prefix[LOCK_PREFIX]) in check_hle()
3429 as_bad (_("missing `lock' with `%s'"), i.hle_prefix); in check_hle()
3434 if (i.prefix[HLE_PREFIX] != XRELEASE_PREFIX_OPCODE) in check_hle()
3437 i.tm.name); in check_hle()
3440 if (i.mem_operands == 0 in check_hle()
3441 || !operand_type_check (i.types[i.operands - 1], anymem)) in check_hle()
3444 " after `xrelease'"), i.tm.name); in check_hle()
3463 memset (&i, '\0', sizeof (i)); in md_assemble()
3465 i.reloc[j] = NO_RELOC; in md_assemble()
3491 && i.operands > 1 in md_assemble()
3494 && !(operand_type_check (i.types[0], imm) in md_assemble()
3495 && operand_type_check (i.types[1], imm))) in md_assemble()
3500 if (i.imm_operands == 2 in md_assemble()
3505 if (i.imm_operands) in md_assemble()
3510 if (i.disp_operands in md_assemble()
3511 && i.disp_encoding != disp_encoding_32bit in md_assemble()
3524 && !i.tm.opcode_modifier.noavx in md_assemble()
3525 && (i.tm.cpu_flags.bitfield.cpusse in md_assemble()
3526 || i.tm.cpu_flags.bitfield.cpusse2 in md_assemble()
3527 || i.tm.cpu_flags.bitfield.cpusse3 in md_assemble()
3528 || i.tm.cpu_flags.bitfield.cpussse3 in md_assemble()
3529 || i.tm.cpu_flags.bitfield.cpusse4_1 in md_assemble()
3530 || i.tm.cpu_flags.bitfield.cpusse4_2)) in md_assemble()
3534 : as_bad) (_("SSE instruction `%s' is used"), i.tm.name); in md_assemble()
3541 if ((i.tm.base_opcode & ~9) == 0x0fb6) in md_assemble()
3545 if (i.reg_operands != 2 in md_assemble()
3546 && !i.suffix in md_assemble()
3548 as_bad (_("ambiguous operand size for `%s'"), i.tm.name); in md_assemble()
3550 i.suffix = 0; in md_assemble()
3553 if (i.tm.opcode_modifier.fwait) in md_assemble()
3558 if (i.rep_prefix && !i.tm.opcode_modifier.repprefixok) in md_assemble()
3561 i.tm.name, i.rep_prefix); in md_assemble()
3567 if (i.prefix[LOCK_PREFIX] in md_assemble()
3568 && (!i.tm.opcode_modifier.islockable in md_assemble()
3569 || i.mem_operands == 0 in md_assemble()
3570 || (i.tm.base_opcode != 0x86 in md_assemble()
3571 && !operand_type_check (i.types[i.operands - 1], anymem)))) in md_assemble()
3578 if (i.hle_prefix && !check_hle ()) in md_assemble()
3582 if (i.bnd_prefix && !i.tm.opcode_modifier.bndprefixok) in md_assemble()
3585 if (i.tm.cpu_flags.bitfield.cpumpx in md_assemble()
3587 && i.prefix[ADDR_PREFIX]) in md_assemble()
3592 && i.tm.opcode_modifier.bndprefixok in md_assemble()
3593 && !i.prefix[BND_PREFIX]) in md_assemble()
3597 if (i.tm.opcode_modifier.isstring && i.mem_operands != 0) in md_assemble()
3601 i.disp_operands = 0; in md_assemble()
3608 for (j = 0; j < i.operands; j++) in md_assemble()
3609 i.types[j] = operand_type_and (i.types[j], i.tm.operand_types[j]); in md_assemble()
3616 if (i.types[0].bitfield.imm1) in md_assemble()
3617 i.imm_operands = 0; /* kludge for shift insns. */ in md_assemble()
3621 if (i.operands <= 3) in md_assemble()
3622 for (j = 0; j < i.operands; j++) in md_assemble()
3623 if (i.types[j].bitfield.inoutportreg in md_assemble()
3624 || i.types[j].bitfield.shiftcount in md_assemble()
3625 || i.types[j].bitfield.acc in md_assemble()
3626 || i.types[j].bitfield.floatacc) in md_assemble()
3627 i.reg_operands--; in md_assemble()
3630 if (!i.tm.opcode_modifier.sse2avx in md_assemble()
3631 && i.tm.opcode_modifier.immext) in md_assemble()
3635 if (i.operands) in md_assemble()
3640 else if (!quiet_warnings && i.tm.opcode_modifier.ugh) in md_assemble()
3643 as_warn (_("translating to `%sp'"), i.tm.name); in md_assemble()
3646 if (i.tm.opcode_modifier.vex || i.tm.opcode_modifier.evex) in md_assemble()
3651 i.tm.name); in md_assemble()
3655 if (i.tm.opcode_modifier.vex) in md_assemble()
3664 if (i.tm.base_opcode == INT_OPCODE in md_assemble()
3665 && !i.tm.opcode_modifier.modrm in md_assemble()
3666 && i.op[0].imms->X_add_number == 3) in md_assemble()
3668 i.tm.base_opcode = INT3_OPCODE; in md_assemble()
3669 i.imm_operands = 0; in md_assemble()
3672 if ((i.tm.opcode_modifier.jump in md_assemble()
3673 || i.tm.opcode_modifier.jumpbyte in md_assemble()
3674 || i.tm.opcode_modifier.jumpdword) in md_assemble()
3675 && i.op[0].disps->X_op == O_constant) in md_assemble()
3680 i.op[0].disps->X_add_symbol = &abs_symbol; in md_assemble()
3681 i.op[0].disps->X_op = O_symbol; in md_assemble()
3684 if (i.tm.opcode_modifier.rex64) in md_assemble()
3685 i.rex |= REX_W; in md_assemble()
3691 if ((i.types[0].bitfield.reg8 in md_assemble()
3692 && (i.op[0].regs->reg_flags & RegRex64) != 0) in md_assemble()
3693 || (i.types[1].bitfield.reg8 in md_assemble()
3694 && (i.op[1].regs->reg_flags & RegRex64) != 0) in md_assemble()
3695 || ((i.types[0].bitfield.reg8 in md_assemble()
3696 || i.types[1].bitfield.reg8) in md_assemble()
3697 && i.rex != 0)) in md_assemble()
3701 i.rex |= REX_OPCODE; in md_assemble()
3705 if (i.types[x].bitfield.reg8 in md_assemble()
3706 && (i.op[x].regs->reg_flags & RegRex64) == 0) in md_assemble()
3709 if (i.op[x].regs->reg_num > 3) in md_assemble()
3712 register_prefix, i.op[x].regs->reg_name); in md_assemble()
3718 i.op[x].regs = i.op[x].regs + 8; in md_assemble()
3723 if (i.rex != 0) in md_assemble()
3724 add_prefix (REX_OPCODE | i.rex); in md_assemble()
3809 i.hle_prefix = current_templates->start->name; in parse_insn()
3811 i.bnd_prefix = current_templates->start->name; in parse_insn()
3813 i.rep_prefix = current_templates->start->name; in parse_insn()
3830 i.swap_operand = 1; in parse_insn()
3834 i.disp_encoding = disp_encoding_8bit; in parse_insn()
3839 i.disp_encoding = disp_encoding_32bit; in parse_insn()
3855 i.suffix = SHORT_MNEM_SUFFIX; in parse_insn()
3859 i.suffix = mnem_p[-1]; in parse_insn()
3868 i.suffix = mnem_p[-1]; in parse_insn()
3880 i.suffix = SHORT_MNEM_SUFFIX; in parse_insn()
3882 i.suffix = LONG_MNEM_SUFFIX; in parse_insn()
3985 i.operands + 1); in parse_operands()
3998 i.operands + 1); in parse_operands()
4001 i.operands + 1); in parse_operands()
4011 i.operands + 1); in parse_operands()
4033 this_operand = i.operands++; in parse_operands()
4034 i.types[this_operand].bitfield.unspecified = 1; in parse_operands()
4035 if (i.operands > MAX_OPERANDS) in parse_operands()
4091 temp_type = i.types[xchg2]; in swap_2_operands()
4092 i.types[xchg2] = i.types[xchg1]; in swap_2_operands()
4093 i.types[xchg1] = temp_type; in swap_2_operands()
4094 temp_op = i.op[xchg2]; in swap_2_operands()
4095 i.op[xchg2] = i.op[xchg1]; in swap_2_operands()
4096 i.op[xchg1] = temp_op; in swap_2_operands()
4097 temp_reloc = i.reloc[xchg2]; in swap_2_operands()
4098 i.reloc[xchg2] = i.reloc[xchg1]; in swap_2_operands()
4099 i.reloc[xchg1] = temp_reloc; in swap_2_operands()
4101 if (i.mask) in swap_2_operands()
4103 if (i.mask->operand == xchg1) in swap_2_operands()
4104 i.mask->operand = xchg2; in swap_2_operands()
4105 else if (i.mask->operand == xchg2) in swap_2_operands()
4106 i.mask->operand = xchg1; in swap_2_operands()
4108 if (i.broadcast) in swap_2_operands()
4110 if (i.broadcast->operand == xchg1) in swap_2_operands()
4111 i.broadcast->operand = xchg2; in swap_2_operands()
4112 else if (i.broadcast->operand == xchg2) in swap_2_operands()
4113 i.broadcast->operand = xchg1; in swap_2_operands()
4115 if (i.rounding) in swap_2_operands()
4117 if (i.rounding->operand == xchg1) in swap_2_operands()
4118 i.rounding->operand = xchg2; in swap_2_operands()
4119 else if (i.rounding->operand == xchg2) in swap_2_operands()
4120 i.rounding->operand = xchg1; in swap_2_operands()
4127 switch (i.operands) in swap_operands()
4131 swap_2_operands (1, i.operands - 2); in swap_operands()
4134 swap_2_operands (0, i.operands - 1); in swap_operands()
4140 if (i.mem_operands == 2) in swap_operands()
4143 temp_seg = i.seg[0]; in swap_operands()
4144 i.seg[0] = i.seg[1]; in swap_operands()
4145 i.seg[1] = temp_seg; in swap_operands()
4157 if (i.suffix) in optimize_imm()
4158 guess_suffix = i.suffix; in optimize_imm()
4159 else if (i.reg_operands) in optimize_imm()
4165 for (op = i.operands; --op >= 0;) in optimize_imm()
4166 if (i.types[op].bitfield.reg8) in optimize_imm()
4171 else if (i.types[op].bitfield.reg16) in optimize_imm()
4176 else if (i.types[op].bitfield.reg32) in optimize_imm()
4181 else if (i.types[op].bitfield.reg64) in optimize_imm()
4187 else if ((flag_code == CODE_16BIT) ^ (i.prefix[DATA_PREFIX] != 0)) in optimize_imm()
4190 for (op = i.operands; --op >= 0;) in optimize_imm()
4191 if (operand_type_check (i.types[op], imm)) in optimize_imm()
4193 switch (i.op[op].imms->X_op) in optimize_imm()
4200 i.types[op].bitfield.imm32 = 1; in optimize_imm()
4201 i.types[op].bitfield.imm64 = 1; in optimize_imm()
4204 i.types[op].bitfield.imm16 = 1; in optimize_imm()
4205 i.types[op].bitfield.imm32 = 1; in optimize_imm()
4206 i.types[op].bitfield.imm32s = 1; in optimize_imm()
4207 i.types[op].bitfield.imm64 = 1; in optimize_imm()
4210 i.types[op].bitfield.imm8 = 1; in optimize_imm()
4211 i.types[op].bitfield.imm8s = 1; in optimize_imm()
4212 i.types[op].bitfield.imm16 = 1; in optimize_imm()
4213 i.types[op].bitfield.imm32 = 1; in optimize_imm()
4214 i.types[op].bitfield.imm32s = 1; in optimize_imm()
4215 i.types[op].bitfield.imm64 = 1; in optimize_imm()
4224 if ((i.types[op].bitfield.imm16) in optimize_imm()
4225 && (i.op[op].imms->X_add_number & ~(offsetT) 0xffff) == 0) in optimize_imm()
4227 i.op[op].imms->X_add_number = in optimize_imm()
4228 (((i.op[op].imms->X_add_number & 0xffff) ^ 0x8000) - 0x8000); in optimize_imm()
4230 if ((i.types[op].bitfield.imm32) in optimize_imm()
4231 && ((i.op[op].imms->X_add_number & ~(((offsetT) 2 << 31) - 1)) in optimize_imm()
4234 i.op[op].imms->X_add_number = ((i.op[op].imms->X_add_number in optimize_imm()
4238 i.types[op] in optimize_imm()
4239 = operand_type_or (i.types[op], in optimize_imm()
4240 smallest_imm_type (i.op[op].imms->X_add_number)); in optimize_imm()
4245 i.types[op].bitfield.imm32 = 0; in optimize_imm()
4289 i.types[op] = operand_type_and (i.types[op], mask); in optimize_imm()
4302 for (op = i.operands; --op >= 0;) in optimize_disp()
4303 if (operand_type_check (i.types[op], disp)) in optimize_disp()
4305 if (i.op[op].disps->X_op == O_constant) in optimize_disp()
4307 offsetT op_disp = i.op[op].disps->X_add_number; in optimize_disp()
4309 if (i.types[op].bitfield.disp16 in optimize_disp()
4316 i.types[op].bitfield.disp64 = 0; in optimize_disp()
4318 if (i.types[op].bitfield.disp32 in optimize_disp()
4326 i.types[op].bitfield.disp64 = 0; in optimize_disp()
4328 if (!op_disp && i.types[op].bitfield.baseindex) in optimize_disp()
4330 i.types[op].bitfield.disp8 = 0; in optimize_disp()
4331 i.types[op].bitfield.disp16 = 0; in optimize_disp()
4332 i.types[op].bitfield.disp32 = 0; in optimize_disp()
4333 i.types[op].bitfield.disp32s = 0; in optimize_disp()
4334 i.types[op].bitfield.disp64 = 0; in optimize_disp()
4335 i.op[op].disps = 0; in optimize_disp()
4336 i.disp_operands--; in optimize_disp()
4342 i.types[op].bitfield.disp64 = 0; in optimize_disp()
4343 i.types[op].bitfield.disp32s = 1; in optimize_disp()
4345 if (i.prefix[ADDR_PREFIX] in optimize_disp()
4347 i.types[op].bitfield.disp32 = 1; in optimize_disp()
4349 if ((i.types[op].bitfield.disp32 in optimize_disp()
4350 || i.types[op].bitfield.disp32s in optimize_disp()
4351 || i.types[op].bitfield.disp16) in optimize_disp()
4353 i.types[op].bitfield.disp8 = 1; in optimize_disp()
4355 else if (i.reloc[op] == BFD_RELOC_386_TLS_DESC_CALL in optimize_disp()
4356 || i.reloc[op] == BFD_RELOC_X86_64_TLSDESC_CALL) in optimize_disp()
4359 i.op[op].disps, 0, i.reloc[op]); in optimize_disp()
4360 i.types[op].bitfield.disp8 = 0; in optimize_disp()
4361 i.types[op].bitfield.disp16 = 0; in optimize_disp()
4362 i.types[op].bitfield.disp32 = 0; in optimize_disp()
4363 i.types[op].bitfield.disp32s = 0; in optimize_disp()
4364 i.types[op].bitfield.disp64 = 0; in optimize_disp()
4368 i.types[op].bitfield.disp64 = 0; in optimize_disp()
4381 && i.index_reg in check_VecOperands()
4382 && (i.index_reg->reg_type.bitfield.regxmm in check_VecOperands()
4383 || i.index_reg->reg_type.bitfield.regymm in check_VecOperands()
4384 || i.index_reg->reg_type.bitfield.regzmm)) in check_VecOperands()
4386 i.error = unsupported_vector_index_register; in check_VecOperands()
4392 && (!i.mask || i.mask->mask->reg_num == 0)) in check_VecOperands()
4394 i.error = no_default_mask; in check_VecOperands()
4402 if (!i.index_reg in check_VecOperands()
4404 && i.index_reg->reg_type.bitfield.regxmm) in check_VecOperands()
4406 && i.index_reg->reg_type.bitfield.regymm) in check_VecOperands()
4408 && i.index_reg->reg_type.bitfield.regzmm))) in check_VecOperands()
4410 i.error = invalid_vsib_address; in check_VecOperands()
4414 gas_assert (i.reg_operands == 2 || i.mask); in check_VecOperands()
4415 if (i.reg_operands == 2 && !i.mask) in check_VecOperands()
4417 gas_assert (i.types[0].bitfield.regxmm in check_VecOperands()
4418 || i.types[0].bitfield.regymm); in check_VecOperands()
4419 gas_assert (i.types[2].bitfield.regxmm in check_VecOperands()
4420 || i.types[2].bitfield.regymm); in check_VecOperands()
4423 if (register_number (i.op[0].regs) in check_VecOperands()
4424 != register_number (i.index_reg) in check_VecOperands()
4425 && register_number (i.op[2].regs) in check_VecOperands()
4426 != register_number (i.index_reg) in check_VecOperands()
4427 && register_number (i.op[0].regs) in check_VecOperands()
4428 != register_number (i.op[2].regs)) in check_VecOperands()
4432 i.error = invalid_vector_register_set; in check_VecOperands()
4437 else if (i.reg_operands == 1 && i.mask) in check_VecOperands()
4439 if ((i.types[1].bitfield.regymm in check_VecOperands()
4440 || i.types[1].bitfield.regzmm) in check_VecOperands()
4441 && (register_number (i.op[1].regs) in check_VecOperands()
4442 == register_number (i.index_reg))) in check_VecOperands()
4446 i.error = invalid_vector_register_set; in check_VecOperands()
4457 if (i.broadcast) in check_VecOperands()
4464 if (i.broadcast->type != t->opcode_modifier.broadcast in check_VecOperands()
4465 || !i.types[i.broadcast->operand].bitfield.mem in check_VecOperands()
4467 && !i.types[i.broadcast->operand].bitfield.dword in check_VecOperands()
4468 && !i.types[i.broadcast->operand].bitfield.unspecified) in check_VecOperands()
4470 && !i.types[i.broadcast->operand].bitfield.qword in check_VecOperands()
4471 && !i.types[i.broadcast->operand].bitfield.unspecified)) in check_VecOperands()
4475 if (i.broadcast->type == BROADCAST_1TO16) in check_VecOperands()
4477 else if (i.broadcast->type == BROADCAST_1TO8) in check_VecOperands()
4479 else if (i.broadcast->type == BROADCAST_1TO4) in check_VecOperands()
4481 else if (i.broadcast->type == BROADCAST_1TO2) in check_VecOperands()
4487 && !t->operand_types[i.broadcast->operand].bitfield.ymmword) in check_VecOperands()
4489 && !t->operand_types[i.broadcast->operand].bitfield.zmmword)) in check_VecOperands()
4492 i.error = unsupported_broadcast; in check_VecOperands()
4498 else if (t->opcode_modifier.broadcast && i.mem_operands) in check_VecOperands()
4501 for (op = 0; op < i.operands; op++) in check_VecOperands()
4502 if (operand_type_check (i.types[op], anymem)) in check_VecOperands()
4504 gas_assert (op < i.operands); in check_VecOperands()
4507 && i.types[op].bitfield.dword) in check_VecOperands()
4509 && i.types[op].bitfield.qword)) in check_VecOperands()
4511 i.error = broadcast_needed; in check_VecOperands()
4517 if (i.mask in check_VecOperands()
4519 || (i.mask->zeroing in check_VecOperands()
4522 i.error = unsupported_masking; in check_VecOperands()
4527 if (i.mask && (i.mask->operand != (int) (i.operands - 1))) in check_VecOperands()
4529 i.error = mask_not_on_destination; in check_VecOperands()
4534 if (i.rounding) in check_VecOperands()
4536 if ((i.rounding->type != saeonly in check_VecOperands()
4538 || (i.rounding->type == saeonly in check_VecOperands()
4542 i.error = unsupported_rc_sae; in check_VecOperands()
4548 if (i.imm_operands > 1 in check_VecOperands()
4549 && i.rounding->operand != (int) (i.imm_operands - 1)) in check_VecOperands()
4551 i.error = rc_sae_operand_not_last_imm; in check_VecOperands()
4559 if (i.broadcast) in check_VecOperands()
4560 i.memshift = t->opcode_modifier.vecesize ? 3 : 2; in check_VecOperands()
4562 i.memshift = t->opcode_modifier.disp8memshift; in check_VecOperands()
4564 for (op = 0; op < i.operands; op++) in check_VecOperands()
4565 if (operand_type_check (i.types[op], disp) in check_VecOperands()
4566 && i.op[op].disps->X_op == O_constant) in check_VecOperands()
4568 offsetT value = i.op[op].disps->X_add_number; in check_VecOperands()
4573 i.types[op].bitfield.vec_disp8 = 1; in check_VecOperands()
4579 i.types[op].bitfield.disp8 = 0; in check_VecOperands()
4581 i.types[op].bitfield.disp16 = 0; in check_VecOperands()
4590 i.error = try_vector_disp8; in check_VecOperands()
4597 i.memshift = -1; in check_VecOperands()
4609 if (i.need_vrex && !t->opcode_modifier.evex) in VEX_check_operands()
4611 i.error = invalid_register_operand; in VEX_check_operands()
4621 if (i.op[0].imms->X_op != O_constant in VEX_check_operands()
4622 || !fits_in_imm4 (i.op[0].imms->X_add_number)) in VEX_check_operands()
4624 i.error = bad_imm4; in VEX_check_operands()
4629 i.types[0] = vec_imm4; in VEX_check_operands()
4659 if (i.suffix == BYTE_MNEM_SUFFIX) in match_template()
4661 else if (i.suffix == WORD_MNEM_SUFFIX) in match_template()
4663 else if (i.suffix == SHORT_MNEM_SUFFIX) in match_template()
4665 else if (i.suffix == LONG_MNEM_SUFFIX) in match_template()
4667 else if (i.suffix == QWORD_MNEM_SUFFIX) in match_template()
4669 else if (i.suffix == LONG_DOUBLE_MNEM_SUFFIX) in match_template()
4673 i.error = number_of_operands_mismatch; in match_template()
4679 if (i.operands != t->operands) in match_template()
4683 i.error = unsupported; in match_template()
4690 i.error = old_gcc_only; in match_template()
4695 i.error = unsupported_with_intel_mnemonic; in match_template()
4700 i.error = unsupported_syntax; in match_template()
4706 i.error = invalid_instruction_suffix; in match_template()
4723 if (i.suffix == QWORD_MNEM_SUFFIX in match_template()
4742 else if (i.suffix == LONG_MNEM_SUFFIX in match_template()
4764 if (i.prefix[ADDR_PREFIX] != 0) in match_template()
4810 overlap0 = operand_type_and (i.types[0], operand_types[0]); in match_template()
4814 if (!operand_type_match (overlap0, i.types[0])) in match_template()
4824 && operand_type_equal (&i.types [0], &acc32) in match_template()
4825 && operand_type_equal (&i.types [1], &acc32)) in match_template()
4827 if (i.swap_operand) in match_template()
4839 if (i.swap_operand && t->opcode_modifier.s) in match_template()
4843 overlap1 = operand_type_and (i.types[1], operand_types[1]); in match_template()
4844 if (!operand_type_match (overlap0, i.types[0]) in match_template()
4845 || !operand_type_match (overlap1, i.types[1]) in match_template()
4847 && !operand_type_register_match (overlap0, i.types[0], in match_template()
4849 overlap1, i.types[1], in match_template()
4858 overlap0 = operand_type_and (i.types[0], operand_types[1]); in match_template()
4859 overlap1 = operand_type_and (i.types[1], operand_types[0]); in match_template()
4860 if (!operand_type_match (overlap0, i.types[0]) in match_template()
4861 || !operand_type_match (overlap1, i.types[1]) in match_template()
4864 i.types[0], in match_template()
4867 i.types[1], in match_template()
4890 overlap4 = operand_type_and (i.types[4], in match_template()
4893 overlap3 = operand_type_and (i.types[3], in match_template()
4896 overlap2 = operand_type_and (i.types[2], in match_template()
4904 if (!operand_type_match (overlap4, i.types[4]) in match_template()
4906 i.types[3], in match_template()
4909 i.types[4], in match_template()
4913 if (!operand_type_match (overlap3, i.types[3]) in match_template()
4916 i.types[2], in match_template()
4919 i.types[3], in match_template()
4927 if (!operand_type_match (overlap2, i.types[2]) in match_template()
4930 i.types[1], in match_template()
4933 i.types[2], in match_template()
4951 specific_error = i.error; in match_template()
4963 switch (specific_error ? specific_error : i.error) in match_template()
5046 && (i.types[0].bitfield.jumpabsolute in match_template()
5062 i.tm = *t; in match_template()
5065 i.tm.operand_types[addr_prefix_disp] in match_template()
5074 i.tm.base_opcode ^= found_reverse_match; in match_template()
5076 i.tm.operand_types[0] = operand_types[1]; in match_template()
5077 i.tm.operand_types[1] = operand_types[0]; in match_template()
5086 int mem_op = operand_type_check (i.types[0], anymem) ? 0 : 1; in check_string()
5087 if (i.tm.operand_types[mem_op].bitfield.esseg) in check_string()
5089 if (i.seg[0] != NULL && i.seg[0] != &es) in check_string()
5092 i.tm.name, in check_string()
5101 i.seg[0] = i.seg[1]; in check_string()
5103 else if (i.tm.operand_types[mem_op + 1].bitfield.esseg) in check_string()
5105 if (i.seg[1] != NULL && i.seg[1] != &es) in check_string()
5108 i.tm.name, in check_string()
5122 if (i.tm.opcode_modifier.size16) in process_suffix()
5123 i.suffix = WORD_MNEM_SUFFIX; in process_suffix()
5124 else if (i.tm.opcode_modifier.size32) in process_suffix()
5125 i.suffix = LONG_MNEM_SUFFIX; in process_suffix()
5126 else if (i.tm.opcode_modifier.size64) in process_suffix()
5127 i.suffix = QWORD_MNEM_SUFFIX; in process_suffix()
5128 else if (i.reg_operands) in process_suffix()
5132 if (!i.suffix) in process_suffix()
5138 if (i.tm.base_opcode == 0xf20f38f1) in process_suffix()
5140 if (i.types[0].bitfield.reg16) in process_suffix()
5141 i.suffix = WORD_MNEM_SUFFIX; in process_suffix()
5142 else if (i.types[0].bitfield.reg32) in process_suffix()
5143 i.suffix = LONG_MNEM_SUFFIX; in process_suffix()
5144 else if (i.types[0].bitfield.reg64) in process_suffix()
5145 i.suffix = QWORD_MNEM_SUFFIX; in process_suffix()
5147 else if (i.tm.base_opcode == 0xf20f38f0) in process_suffix()
5149 if (i.types[0].bitfield.reg8) in process_suffix()
5150 i.suffix = BYTE_MNEM_SUFFIX; in process_suffix()
5153 if (!i.suffix) in process_suffix()
5157 if (i.tm.base_opcode == 0xf20f38f1 in process_suffix()
5158 || i.tm.base_opcode == 0xf20f38f0) in process_suffix()
5162 i.tm.name); in process_suffix()
5166 for (op = i.operands; --op >= 0;) in process_suffix()
5167 if (!i.tm.operand_types[op].bitfield.inoutportreg) in process_suffix()
5169 if (i.types[op].bitfield.reg8) in process_suffix()
5171 i.suffix = BYTE_MNEM_SUFFIX; in process_suffix()
5174 else if (i.types[op].bitfield.reg16) in process_suffix()
5176 i.suffix = WORD_MNEM_SUFFIX; in process_suffix()
5179 else if (i.types[op].bitfield.reg32) in process_suffix()
5181 i.suffix = LONG_MNEM_SUFFIX; in process_suffix()
5184 else if (i.types[op].bitfield.reg64) in process_suffix()
5186 i.suffix = QWORD_MNEM_SUFFIX; in process_suffix()
5192 else if (i.suffix == BYTE_MNEM_SUFFIX) in process_suffix()
5195 && i.tm.opcode_modifier.ignoresize in process_suffix()
5196 && i.tm.opcode_modifier.no_bsuf) in process_suffix()
5197 i.suffix = 0; in process_suffix()
5201 else if (i.suffix == LONG_MNEM_SUFFIX) in process_suffix()
5204 && i.tm.opcode_modifier.ignoresize in process_suffix()
5205 && i.tm.opcode_modifier.no_lsuf) in process_suffix()
5206 i.suffix = 0; in process_suffix()
5210 else if (i.suffix == QWORD_MNEM_SUFFIX) in process_suffix()
5213 && i.tm.opcode_modifier.ignoresize in process_suffix()
5214 && i.tm.opcode_modifier.no_qsuf) in process_suffix()
5215 i.suffix = 0; in process_suffix()
5219 else if (i.suffix == WORD_MNEM_SUFFIX) in process_suffix()
5222 && i.tm.opcode_modifier.ignoresize in process_suffix()
5223 && i.tm.opcode_modifier.no_wsuf) in process_suffix()
5224 i.suffix = 0; in process_suffix()
5228 else if (i.suffix == XMMWORD_MNEM_SUFFIX in process_suffix()
5229 || i.suffix == YMMWORD_MNEM_SUFFIX in process_suffix()
5230 || i.suffix == ZMMWORD_MNEM_SUFFIX) in process_suffix()
5235 else if (intel_syntax && i.tm.opcode_modifier.ignoresize) in process_suffix()
5241 else if (i.tm.opcode_modifier.defaultsize in process_suffix()
5242 && !i.suffix in process_suffix()
5244 && i.tm.opcode_modifier.no_ssuf) in process_suffix()
5246 i.suffix = stackop_size; in process_suffix()
5249 && !i.suffix in process_suffix()
5250 && (i.tm.operand_types[0].bitfield.jumpabsolute in process_suffix()
5251 || i.tm.opcode_modifier.jumpbyte in process_suffix()
5252 || i.tm.opcode_modifier.jumpintersegment in process_suffix()
5253 || (i.tm.base_opcode == 0x0f01 /* [ls][gi]dt */ in process_suffix()
5254 && i.tm.extension_opcode <= 3))) in process_suffix()
5259 if (!i.tm.opcode_modifier.no_qsuf) in process_suffix()
5261 i.suffix = QWORD_MNEM_SUFFIX; in process_suffix()
5265 if (!i.tm.opcode_modifier.no_lsuf) in process_suffix()
5266 i.suffix = LONG_MNEM_SUFFIX; in process_suffix()
5269 if (!i.tm.opcode_modifier.no_wsuf) in process_suffix()
5270 i.suffix = WORD_MNEM_SUFFIX; in process_suffix()
5275 if (!i.suffix) in process_suffix()
5279 if (i.tm.opcode_modifier.w) in process_suffix()
5290 suffixes = !i.tm.opcode_modifier.no_bsuf; in process_suffix()
5291 if (!i.tm.opcode_modifier.no_wsuf) in process_suffix()
5293 if (!i.tm.opcode_modifier.no_lsuf) in process_suffix()
5295 if (!i.tm.opcode_modifier.no_ldsuf) in process_suffix()
5297 if (!i.tm.opcode_modifier.no_ssuf) in process_suffix()
5299 if (!i.tm.opcode_modifier.no_qsuf) in process_suffix()
5303 if (i.tm.opcode_modifier.w in process_suffix()
5305 && !i.tm.opcode_modifier.defaultsize in process_suffix()
5306 && !i.tm.opcode_modifier.ignoresize)) in process_suffix()
5308 as_bad (_("ambiguous operand size for `%s'"), i.tm.name); in process_suffix()
5317 if (i.suffix in process_suffix()
5318 && i.suffix != BYTE_MNEM_SUFFIX in process_suffix()
5319 && i.suffix != XMMWORD_MNEM_SUFFIX in process_suffix()
5320 && i.suffix != YMMWORD_MNEM_SUFFIX in process_suffix()
5321 && i.suffix != ZMMWORD_MNEM_SUFFIX) in process_suffix()
5324 if (i.tm.opcode_modifier.w) in process_suffix()
5326 if (i.tm.opcode_modifier.shortform) in process_suffix()
5327 i.tm.base_opcode |= 8; in process_suffix()
5329 i.tm.base_opcode |= 1; in process_suffix()
5335 if (i.tm.opcode_modifier.addrprefixop0) in process_suffix()
5340 && i.op->regs[0].reg_type.bitfield.reg16) in process_suffix()
5342 && i.op->regs[0].reg_type.bitfield.reg32)) in process_suffix()
5346 else if (i.suffix != QWORD_MNEM_SUFFIX in process_suffix()
5347 && i.suffix != LONG_DOUBLE_MNEM_SUFFIX in process_suffix()
5348 && !i.tm.opcode_modifier.ignoresize in process_suffix()
5349 && !i.tm.opcode_modifier.floatmf in process_suffix()
5350 && ((i.suffix == LONG_MNEM_SUFFIX) == (flag_code == CODE_16BIT) in process_suffix()
5352 && i.tm.opcode_modifier.jumpbyte))) in process_suffix()
5356 if (i.tm.opcode_modifier.jumpbyte) /* jcxz, loop */ in process_suffix()
5364 if (i.suffix == QWORD_MNEM_SUFFIX in process_suffix()
5366 && !i.tm.opcode_modifier.norex64) in process_suffix()
5370 if (! (i.operands == 2 in process_suffix()
5371 && i.tm.base_opcode == 0x90 in process_suffix()
5372 && i.tm.extension_opcode == None in process_suffix()
5373 && operand_type_equal (&i.types [0], &acc64) in process_suffix()
5374 && operand_type_equal (&i.types [1], &acc64)) in process_suffix()
5375 && ! (i.operands == 1 in process_suffix()
5376 && i.tm.base_opcode == 0xfc7 in process_suffix()
5377 && i.tm.extension_opcode == 1 in process_suffix()
5378 && !operand_type_check (i.types [0], reg) in process_suffix()
5379 && operand_type_check (i.types [0], anymem))) in process_suffix()
5380 i.rex |= REX_W; in process_suffix()
5384 if (i.suffix == LONG_MNEM_SUFFIX) in process_suffix()
5385 if (i.tm.opcode_modifier.floatmf) in process_suffix()
5386 i.tm.base_opcode ^= 4; in process_suffix()
5397 for (op = i.operands; --op >= 0;) in check_byte_reg()
5402 if (i.types[op].bitfield.reg8) in check_byte_reg()
5406 if (i.tm.operand_types[op].bitfield.inoutportreg) in check_byte_reg()
5410 if (i.tm.base_opcode == 0xf20f38f0) in check_byte_reg()
5413 if ((i.types[op].bitfield.reg16 in check_byte_reg()
5414 || i.types[op].bitfield.reg32 in check_byte_reg()
5415 || i.types[op].bitfield.reg64) in check_byte_reg()
5416 && i.op[op].regs->reg_num < 4 in check_byte_reg()
5425 (i.op[op].regs + (i.types[op].bitfield.reg16 in check_byte_reg()
5429 i.op[op].regs->reg_name, in check_byte_reg()
5430 i.suffix); in check_byte_reg()
5435 if (i.types[op].bitfield.reg16 in check_byte_reg()
5436 || i.types[op].bitfield.reg32 in check_byte_reg()
5437 || i.types[op].bitfield.reg64 in check_byte_reg()
5438 || i.types[op].bitfield.regmmx in check_byte_reg()
5439 || i.types[op].bitfield.regxmm in check_byte_reg()
5440 || i.types[op].bitfield.regymm in check_byte_reg()
5441 || i.types[op].bitfield.regzmm in check_byte_reg()
5442 || i.types[op].bitfield.sreg2 in check_byte_reg()
5443 || i.types[op].bitfield.sreg3 in check_byte_reg()
5444 || i.types[op].bitfield.control in check_byte_reg()
5445 || i.types[op].bitfield.debug in check_byte_reg()
5446 || i.types[op].bitfield.test in check_byte_reg()
5447 || i.types[op].bitfield.floatreg in check_byte_reg()
5448 || i.types[op].bitfield.floatacc) in check_byte_reg()
5452 i.op[op].regs->reg_name, in check_byte_reg()
5453 i.tm.name, in check_byte_reg()
5454 i.suffix); in check_byte_reg()
5466 for (op = i.operands; --op >= 0;) in check_long_reg()
5469 if (i.types[op].bitfield.reg8 in check_long_reg()
5470 && (i.tm.operand_types[op].bitfield.reg16 in check_long_reg()
5471 || i.tm.operand_types[op].bitfield.reg32 in check_long_reg()
5472 || i.tm.operand_types[op].bitfield.acc)) in check_long_reg()
5476 i.op[op].regs->reg_name, in check_long_reg()
5477 i.tm.name, in check_long_reg()
5478 i.suffix); in check_long_reg()
5483 && i.types[op].bitfield.reg16 in check_long_reg()
5484 && (i.tm.operand_types[op].bitfield.reg32 in check_long_reg()
5485 || i.tm.operand_types[op].bitfield.acc)) in check_long_reg()
5492 register_prefix, i.op[op].regs->reg_name, in check_long_reg()
5493 i.suffix); in check_long_reg()
5499 (i.op[op].regs + REGNAM_EAX - REGNAM_AX)->reg_name, in check_long_reg()
5500 register_prefix, i.op[op].regs->reg_name, i.suffix); in check_long_reg()
5504 else if (i.types[op].bitfield.reg64 in check_long_reg()
5505 && (i.tm.operand_types[op].bitfield.reg32 in check_long_reg()
5506 || i.tm.operand_types[op].bitfield.acc)) in check_long_reg()
5509 && i.tm.opcode_modifier.toqword in check_long_reg()
5510 && !i.types[0].bitfield.regxmm) in check_long_reg()
5513 i.suffix = QWORD_MNEM_SUFFIX; in check_long_reg()
5518 register_prefix, i.op[op].regs->reg_name, in check_long_reg()
5519 i.suffix); in check_long_reg()
5531 for (op = i.operands; --op >= 0; ) in check_qword_reg()
5534 if (i.types[op].bitfield.reg8 in check_qword_reg()
5535 && (i.tm.operand_types[op].bitfield.reg16 in check_qword_reg()
5536 || i.tm.operand_types[op].bitfield.reg32 in check_qword_reg()
5537 || i.tm.operand_types[op].bitfield.acc)) in check_qword_reg()
5541 i.op[op].regs->reg_name, in check_qword_reg()
5542 i.tm.name, in check_qword_reg()
5543 i.suffix); in check_qword_reg()
5547 else if ((i.types[op].bitfield.reg16 in check_qword_reg()
5548 || i.types[op].bitfield.reg32) in check_qword_reg()
5549 && (i.tm.operand_types[op].bitfield.reg32 in check_qword_reg()
5550 || i.tm.operand_types[op].bitfield.acc)) in check_qword_reg()
5555 && i.tm.opcode_modifier.todword in check_qword_reg()
5556 && !i.types[0].bitfield.regxmm) in check_qword_reg()
5559 i.suffix = LONG_MNEM_SUFFIX; in check_qword_reg()
5564 register_prefix, i.op[op].regs->reg_name, in check_qword_reg()
5565 i.suffix); in check_qword_reg()
5576 for (op = i.operands; --op >= 0;) in check_word_reg()
5579 if (i.types[op].bitfield.reg8 in check_word_reg()
5580 && (i.tm.operand_types[op].bitfield.reg16 in check_word_reg()
5581 || i.tm.operand_types[op].bitfield.reg32 in check_word_reg()
5582 || i.tm.operand_types[op].bitfield.acc)) in check_word_reg()
5586 i.op[op].regs->reg_name, in check_word_reg()
5587 i.tm.name, in check_word_reg()
5588 i.suffix); in check_word_reg()
5593 && (i.types[op].bitfield.reg32 in check_word_reg()
5594 || i.types[op].bitfield.reg64) in check_word_reg()
5595 && (i.tm.operand_types[op].bitfield.reg16 in check_word_reg()
5596 || i.tm.operand_types[op].bitfield.acc)) in check_word_reg()
5603 register_prefix, i.op[op].regs->reg_name, in check_word_reg()
5604 i.suffix); in check_word_reg()
5610 (i.op[op].regs + REGNAM_AX - REGNAM_EAX)->reg_name, in check_word_reg()
5611 register_prefix, i.op[op].regs->reg_name, i.suffix); in check_word_reg()
5620 i386_operand_type overlap = i.types[j]; in update_imm()
5634 if (i.suffix) in update_imm()
5639 if (i.suffix == BYTE_MNEM_SUFFIX) in update_imm()
5644 else if (i.suffix == WORD_MNEM_SUFFIX) in update_imm()
5646 else if (i.suffix == QWORD_MNEM_SUFFIX) in update_imm()
5659 if ((flag_code == CODE_16BIT) ^ (i.prefix[DATA_PREFIX] != 0)) in update_imm()
5676 i.types[j] = overlap; in update_imm()
5687 n = i.operands > 2 ? 2 : i.operands; in finalize_imm()
5695 gas_assert (operand_type_check (i.types[2], imm) == 0); in finalize_imm()
5708 i.tm.name, register_prefix, ireg); in bad_implicit_operand()
5711 i.tm.name, register_prefix, ireg); in bad_implicit_operand()
5723 if (i.tm.opcode_modifier.sse2avx && i.tm.opcode_modifier.vexvvvv) in process_operands()
5725 unsigned int dupl = i.operands; in process_operands()
5730 gas_assert (i.reg_operands in process_operands()
5732 && operand_type_equal (&i.types[dest], &regxmm)); in process_operands()
5734 if (i.tm.opcode_modifier.firstxmm0) in process_operands()
5737 gas_assert (operand_type_equal (&i.types[0], &regxmm)); in process_operands()
5738 if (register_number (i.op[0].regs) != 0) in process_operands()
5741 if (i.tm.opcode_modifier.vexsources == VEX3SOURCES) in process_operands()
5752 for (j = 1; j < i.operands; j++) in process_operands()
5754 i.op[j - 1] = i.op[j]; in process_operands()
5755 i.types[j - 1] = i.types[j]; in process_operands()
5756 i.tm.operand_types[j - 1] = i.tm.operand_types[j]; in process_operands()
5760 else if (i.tm.opcode_modifier.implicit1stxmm0) in process_operands()
5763 && (i.tm.opcode_modifier.vexsources in process_operands()
5768 for (j = i.operands; j > 0; j--) in process_operands()
5770 i.op[j] = i.op[j - 1]; in process_operands()
5771 i.types[j] = i.types[j - 1]; in process_operands()
5772 i.tm.operand_types[j] = i.tm.operand_types[j - 1]; in process_operands()
5774 i.op[0].regs in process_operands()
5776 i.types[0] = regxmm; in process_operands()
5777 i.tm.operand_types[0] = regxmm; in process_operands()
5779 i.operands += 2; in process_operands()
5780 i.reg_operands += 2; in process_operands()
5781 i.tm.operands += 2; in process_operands()
5785 i.op[dupl] = i.op[dest]; in process_operands()
5786 i.types[dupl] = i.types[dest]; in process_operands()
5787 i.tm.operand_types[dupl] = i.tm.operand_types[dest]; in process_operands()
5792 i.operands++; in process_operands()
5793 i.reg_operands++; in process_operands()
5794 i.tm.operands++; in process_operands()
5796 i.op[dupl] = i.op[dest]; in process_operands()
5797 i.types[dupl] = i.types[dest]; in process_operands()
5798 i.tm.operand_types[dupl] = i.tm.operand_types[dest]; in process_operands()
5801 if (i.tm.opcode_modifier.immext) in process_operands()
5804 else if (i.tm.opcode_modifier.firstxmm0) in process_operands()
5809 gas_assert (i.reg_operands in process_operands()
5810 && (operand_type_equal (&i.types[0], &regxmm) in process_operands()
5811 || operand_type_equal (&i.types[0], &regymm) in process_operands()
5812 || operand_type_equal (&i.types[0], &regzmm))); in process_operands()
5813 if (register_number (i.op[0].regs) != 0) in process_operands()
5814 return bad_implicit_operand (i.types[0].bitfield.regxmm); in process_operands()
5816 for (j = 1; j < i.operands; j++) in process_operands()
5818 i.op[j - 1] = i.op[j]; in process_operands()
5819 i.types[j - 1] = i.types[j]; in process_operands()
5823 i.tm.operand_types [j - 1] = i.tm.operand_types [j]; in process_operands()
5826 i.operands--; in process_operands()
5827 i.reg_operands--; in process_operands()
5828 i.tm.operands--; in process_operands()
5830 else if (i.tm.opcode_modifier.regkludge) in process_operands()
5838 if (operand_type_check (i.types[0], reg)) in process_operands()
5843 gas_assert (i.reg_operands == 1 in process_operands()
5844 && i.op[first_reg_op + 1].regs == 0); in process_operands()
5845 i.op[first_reg_op + 1].regs = i.op[first_reg_op].regs; in process_operands()
5846 i.types[first_reg_op + 1] = i.types[first_reg_op]; in process_operands()
5847 i.operands++; in process_operands()
5848 i.reg_operands++; in process_operands()
5851 if (i.tm.opcode_modifier.shortform) in process_operands()
5853 if (i.types[0].bitfield.sreg2 in process_operands()
5854 || i.types[0].bitfield.sreg3) in process_operands()
5856 if (i.tm.base_opcode == POP_SEG_SHORT in process_operands()
5857 && i.op[0].regs->reg_num == 1) in process_operands()
5862 i.tm.base_opcode |= (i.op[0].regs->reg_num << 3); in process_operands()
5863 if ((i.op[0].regs->reg_flags & RegRex) != 0) in process_operands()
5864 i.rex |= REX_B; in process_operands()
5872 if (i.types[0].bitfield.floatreg in process_operands()
5873 || operand_type_check (i.types[0], reg)) in process_operands()
5878 i.tm.base_opcode |= i.op[op].regs->reg_num; in process_operands()
5879 if ((i.op[op].regs->reg_flags & RegRex) != 0) in process_operands()
5880 i.rex |= REX_B; in process_operands()
5881 if (!quiet_warnings && i.tm.opcode_modifier.ugh) in process_operands()
5885 if (i.operands == 2) in process_operands()
5888 as_warn (_("translating to `%s %s%s,%s%s'"), i.tm.name, in process_operands()
5889 register_prefix, i.op[!intel_syntax].regs->reg_name, in process_operands()
5890 register_prefix, i.op[intel_syntax].regs->reg_name); in process_operands()
5895 as_warn (_("translating to `%s %s%s'"), i.tm.name, in process_operands()
5896 register_prefix, i.op[0].regs->reg_name); in process_operands()
5901 else if (i.tm.opcode_modifier.modrm) in process_operands()
5909 else if ((i.tm.base_opcode & ~0x3) == MOV_AX_DISP32) in process_operands()
5913 else if (i.tm.opcode_modifier.isstring) in process_operands()
5920 if (i.tm.base_opcode == 0x8d /* lea */ in process_operands()
5921 && i.seg[0] in process_operands()
5923 as_warn (_("segment override on `%s' is ineffectual"), i.tm.name); in process_operands()
5930 if ((i.seg[0]) && (i.seg[0] != default_seg)) in process_operands()
5932 if (!add_prefix (i.seg[0]->seg_prefix)) in process_operands()
5947 vex_3_sources = i.tm.opcode_modifier.vexsources == VEX3SOURCES; in build_modrm_byte()
5953 if (i.tm.opcode_modifier.veximmext in build_modrm_byte()
5954 && i.tm.opcode_modifier.immext) in build_modrm_byte()
5956 dest = i.operands - 2; in build_modrm_byte()
5960 dest = i.operands - 1; in build_modrm_byte()
5970 gas_assert ((i.reg_operands == 4 in build_modrm_byte()
5971 || (i.reg_operands == 3 && i.mem_operands == 1)) in build_modrm_byte()
5972 && i.tm.opcode_modifier.vexvvvv == VEXXDS in build_modrm_byte()
5973 && (i.tm.opcode_modifier.veximmext in build_modrm_byte()
5974 || (i.imm_operands == 1 in build_modrm_byte()
5975 && i.types[0].bitfield.vec_imm4 in build_modrm_byte()
5976 && (i.tm.opcode_modifier.vexw == VEXW0 in build_modrm_byte()
5977 || i.tm.opcode_modifier.vexw == VEXW1) in build_modrm_byte()
5978 && (operand_type_equal (&i.tm.operand_types[dest], &regxmm) in build_modrm_byte()
5979 || operand_type_equal (&i.tm.operand_types[dest], &regymm) in build_modrm_byte()
5980 || operand_type_equal (&i.tm.operand_types[dest], &regzmm))))); in build_modrm_byte()
5982 if (i.imm_operands == 0) in build_modrm_byte()
5986 exp = &im_expressions[i.imm_operands++]; in build_modrm_byte()
5987 i.op[i.operands].imms = exp; in build_modrm_byte()
5988 i.types[i.operands] = imm8; in build_modrm_byte()
5989 i.operands++; in build_modrm_byte()
5992 if (i.tm.opcode_modifier.vexw == VEXW1) in build_modrm_byte()
6004 if (i.tm.cpu_flags.bitfield.cpufma) in build_modrm_byte()
6012 gas_assert (operand_type_equal (&i.tm.operand_types[reg_slot], in build_modrm_byte()
6014 || operand_type_equal (&i.tm.operand_types[reg_slot], in build_modrm_byte()
6016 || operand_type_equal (&i.tm.operand_types[reg_slot], in build_modrm_byte()
6019 exp->X_add_number = register_number (i.op[reg_slot].regs) << 4; in build_modrm_byte()
6020 gas_assert ((i.op[reg_slot].regs->reg_flags & RegVRex) == 0); in build_modrm_byte()
6026 if (i.tm.opcode_modifier.vexw == VEXW0) in build_modrm_byte()
6043 if (i.tm.opcode_modifier.immext) in build_modrm_byte()
6047 imm_slot = i.operands - 1; in build_modrm_byte()
6056 i.types[imm_slot].bitfield.imm8 = 1; in build_modrm_byte()
6059 gas_assert (operand_type_equal (&i.tm.operand_types[reg_slot], in build_modrm_byte()
6061 || operand_type_equal (&i.tm.operand_types[reg_slot], in build_modrm_byte()
6063 || operand_type_equal (&i.tm.operand_types[reg_slot], in build_modrm_byte()
6065 i.op[imm_slot].imms->X_add_number in build_modrm_byte()
6066 |= register_number (i.op[reg_slot].regs) << 4; in build_modrm_byte()
6067 gas_assert ((i.op[reg_slot].regs->reg_flags & RegVRex) == 0); in build_modrm_byte()
6070 gas_assert (operand_type_equal (&i.tm.operand_types[nds], &regxmm) in build_modrm_byte()
6071 || operand_type_equal (&i.tm.operand_types[nds], in build_modrm_byte()
6073 || operand_type_equal (&i.tm.operand_types[nds], in build_modrm_byte()
6075 i.vex.register_specifier = i.op[nds].regs; in build_modrm_byte()
6086 if (i.mem_operands == 0 in build_modrm_byte()
6087 && ((i.reg_operands == 2 in build_modrm_byte()
6088 && i.tm.opcode_modifier.vexvvvv <= VEXXDS) in build_modrm_byte()
6089 || (i.reg_operands == 3 in build_modrm_byte()
6090 && i.tm.opcode_modifier.vexvvvv == VEXXDS) in build_modrm_byte()
6091 || (i.reg_operands == 4 && vex_3_sources))) in build_modrm_byte()
6093 switch (i.operands) in build_modrm_byte()
6103 gas_assert (i.imm_operands == 1 in build_modrm_byte()
6104 || (i.imm_operands == 0 in build_modrm_byte()
6105 && (i.tm.opcode_modifier.vexvvvv == VEXXDS in build_modrm_byte()
6106 || i.types[0].bitfield.shiftcount))); in build_modrm_byte()
6107 if (operand_type_check (i.types[0], imm) in build_modrm_byte()
6108 || i.types[0].bitfield.shiftcount) in build_modrm_byte()
6121 gas_assert ((i.imm_operands == 2 in build_modrm_byte()
6122 && i.types[0].bitfield.imm8 in build_modrm_byte()
6123 && i.types[1].bitfield.imm8) in build_modrm_byte()
6124 || (i.tm.opcode_modifier.vexvvvv == VEXXDS in build_modrm_byte()
6125 && i.imm_operands == 1 in build_modrm_byte()
6126 && (i.types[0].bitfield.imm8 in build_modrm_byte()
6127 || i.types[i.operands - 1].bitfield.imm8 in build_modrm_byte()
6128 || i.rounding))); in build_modrm_byte()
6129 if (i.imm_operands == 2) in build_modrm_byte()
6133 if (i.types[0].bitfield.imm8) in build_modrm_byte()
6140 if (i.tm.opcode_modifier.evex) in build_modrm_byte()
6147 gas_assert (i.imm_operands == 2 in build_modrm_byte()
6148 && i.tm.opcode_modifier.sae in build_modrm_byte()
6149 && operand_type_check (i.types[0], imm)); in build_modrm_byte()
6150 if (operand_type_check (i.types[1], imm)) in build_modrm_byte()
6152 else if (operand_type_check (i.types[4], imm)) in build_modrm_byte()
6169 if (i.rounding && i.rounding->operand == (int) dest) in build_modrm_byte()
6172 if (i.tm.opcode_modifier.vexvvvv == VEXXDS) in build_modrm_byte()
6184 if (!i.tm.operand_types[source].bitfield.baseindex in build_modrm_byte()
6185 && i.tm.operand_types[dest].bitfield.baseindex) in build_modrm_byte()
6193 op = i.tm.operand_types[vvvv]; in build_modrm_byte()
6195 if ((dest + 1) >= i.operands in build_modrm_byte()
6203 i.vex.register_specifier = i.op[vvvv].regs; in build_modrm_byte()
6208 i.rm.mode = 3; in build_modrm_byte()
6215 if (!i.tm.operand_types[dest].bitfield.regmem in build_modrm_byte()
6216 && operand_type_check (i.tm.operand_types[dest], anymem) == 0) in build_modrm_byte()
6218 i.rm.reg = i.op[dest].regs->reg_num; in build_modrm_byte()
6219 i.rm.regmem = i.op[source].regs->reg_num; in build_modrm_byte()
6220 if ((i.op[dest].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6221 i.rex |= REX_R; in build_modrm_byte()
6222 if ((i.op[dest].regs->reg_flags & RegVRex) != 0) in build_modrm_byte()
6223 i.vrex |= REX_R; in build_modrm_byte()
6224 if ((i.op[source].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6225 i.rex |= REX_B; in build_modrm_byte()
6226 if ((i.op[source].regs->reg_flags & RegVRex) != 0) in build_modrm_byte()
6227 i.vrex |= REX_B; in build_modrm_byte()
6231 i.rm.reg = i.op[source].regs->reg_num; in build_modrm_byte()
6232 i.rm.regmem = i.op[dest].regs->reg_num; in build_modrm_byte()
6233 if ((i.op[dest].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6234 i.rex |= REX_B; in build_modrm_byte()
6235 if ((i.op[dest].regs->reg_flags & RegVRex) != 0) in build_modrm_byte()
6236 i.vrex |= REX_B; in build_modrm_byte()
6237 if ((i.op[source].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6238 i.rex |= REX_R; in build_modrm_byte()
6239 if ((i.op[source].regs->reg_flags & RegVRex) != 0) in build_modrm_byte()
6240 i.vrex |= REX_R; in build_modrm_byte()
6242 if (flag_code != CODE_64BIT && (i.rex & (REX_R | REX_B))) in build_modrm_byte()
6244 if (!i.types[0].bitfield.control in build_modrm_byte()
6245 && !i.types[1].bitfield.control) in build_modrm_byte()
6247 i.rex &= ~(REX_R | REX_B); in build_modrm_byte()
6255 if (i.mem_operands) in build_modrm_byte()
6260 for (op = 0; op < i.operands; op++) in build_modrm_byte()
6261 if (operand_type_check (i.types[op], anymem)) in build_modrm_byte()
6263 gas_assert (op < i.operands); in build_modrm_byte()
6265 if (i.tm.opcode_modifier.vecsib) in build_modrm_byte()
6267 if (i.index_reg->reg_num == RegEiz in build_modrm_byte()
6268 || i.index_reg->reg_num == RegRiz) in build_modrm_byte()
6271 i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING; in build_modrm_byte()
6272 if (!i.base_reg) in build_modrm_byte()
6274 i.sib.base = NO_BASE_REGISTER; in build_modrm_byte()
6275 i.sib.scale = i.log2_scale_factor; in build_modrm_byte()
6277 i.types[op].bitfield.vec_disp8 = 0; in build_modrm_byte()
6278 i.types[op].bitfield.disp8 = 0; in build_modrm_byte()
6279 i.types[op].bitfield.disp16 = 0; in build_modrm_byte()
6280 i.types[op].bitfield.disp64 = 0; in build_modrm_byte()
6284 i.types[op].bitfield.disp32 = 1; in build_modrm_byte()
6285 i.types[op].bitfield.disp32s = 0; in build_modrm_byte()
6289 i.types[op].bitfield.disp32 = 0; in build_modrm_byte()
6290 i.types[op].bitfield.disp32s = 1; in build_modrm_byte()
6293 i.sib.index = i.index_reg->reg_num; in build_modrm_byte()
6294 if ((i.index_reg->reg_flags & RegRex) != 0) in build_modrm_byte()
6295 i.rex |= REX_X; in build_modrm_byte()
6296 if ((i.index_reg->reg_flags & RegVRex) != 0) in build_modrm_byte()
6297 i.vrex |= REX_X; in build_modrm_byte()
6302 if (i.base_reg == 0) in build_modrm_byte()
6304 i.rm.mode = 0; in build_modrm_byte()
6305 if (!i.disp_operands) in build_modrm_byte()
6310 if (i.tm.opcode_modifier.vecsib) in build_modrm_byte()
6311 i.types[op].bitfield.disp32 = 1; in build_modrm_byte()
6313 if (i.index_reg == 0) in build_modrm_byte()
6315 gas_assert (!i.tm.opcode_modifier.vecsib); in build_modrm_byte()
6323 i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING; in build_modrm_byte()
6324 i.sib.base = NO_BASE_REGISTER; in build_modrm_byte()
6325 i.sib.index = NO_INDEX_REGISTER; in build_modrm_byte()
6326 i.types[op] = ((i.prefix[ADDR_PREFIX] == 0) in build_modrm_byte()
6330 ^ (i.prefix[ADDR_PREFIX] != 0)) in build_modrm_byte()
6332 i.rm.regmem = NO_BASE_REGISTER_16; in build_modrm_byte()
6333 i.types[op] = disp16; in build_modrm_byte()
6337 i.rm.regmem = NO_BASE_REGISTER; in build_modrm_byte()
6338 i.types[op] = disp32; in build_modrm_byte()
6341 else if (!i.tm.opcode_modifier.vecsib) in build_modrm_byte()
6344 if (i.index_reg->reg_num == RegEiz in build_modrm_byte()
6345 || i.index_reg->reg_num == RegRiz) in build_modrm_byte()
6346 i.sib.index = NO_INDEX_REGISTER; in build_modrm_byte()
6348 i.sib.index = i.index_reg->reg_num; in build_modrm_byte()
6349 i.sib.base = NO_BASE_REGISTER; in build_modrm_byte()
6350 i.sib.scale = i.log2_scale_factor; in build_modrm_byte()
6351 i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING; in build_modrm_byte()
6353 i.types[op].bitfield.vec_disp8 = 0; in build_modrm_byte()
6354 i.types[op].bitfield.disp8 = 0; in build_modrm_byte()
6355 i.types[op].bitfield.disp16 = 0; in build_modrm_byte()
6356 i.types[op].bitfield.disp64 = 0; in build_modrm_byte()
6360 i.types[op].bitfield.disp32 = 1; in build_modrm_byte()
6361 i.types[op].bitfield.disp32s = 0; in build_modrm_byte()
6365 i.types[op].bitfield.disp32 = 0; in build_modrm_byte()
6366 i.types[op].bitfield.disp32s = 1; in build_modrm_byte()
6368 if ((i.index_reg->reg_flags & RegRex) != 0) in build_modrm_byte()
6369 i.rex |= REX_X; in build_modrm_byte()
6373 else if (i.base_reg->reg_num == RegRip || in build_modrm_byte()
6374 i.base_reg->reg_num == RegEip) in build_modrm_byte()
6376 gas_assert (!i.tm.opcode_modifier.vecsib); in build_modrm_byte()
6377 i.rm.regmem = NO_BASE_REGISTER; in build_modrm_byte()
6378 i.types[op].bitfield.disp8 = 0; in build_modrm_byte()
6379 i.types[op].bitfield.disp16 = 0; in build_modrm_byte()
6380 i.types[op].bitfield.disp32 = 0; in build_modrm_byte()
6381 i.types[op].bitfield.disp32s = 1; in build_modrm_byte()
6382 i.types[op].bitfield.disp64 = 0; in build_modrm_byte()
6383 i.types[op].bitfield.vec_disp8 = 0; in build_modrm_byte()
6384 i.flags[op] |= Operand_PCrel; in build_modrm_byte()
6385 if (! i.disp_operands) in build_modrm_byte()
6388 else if (i.base_reg->reg_type.bitfield.reg16) in build_modrm_byte()
6390 gas_assert (!i.tm.opcode_modifier.vecsib); in build_modrm_byte()
6391 switch (i.base_reg->reg_num) in build_modrm_byte()
6394 if (i.index_reg == 0) in build_modrm_byte()
6395 i.rm.regmem = 7; in build_modrm_byte()
6397 i.rm.regmem = i.index_reg->reg_num - 6; in build_modrm_byte()
6401 if (i.index_reg == 0) in build_modrm_byte()
6403 i.rm.regmem = 6; in build_modrm_byte()
6404 if (operand_type_check (i.types[op], disp) == 0) in build_modrm_byte()
6407 if (i.tm.operand_types[op].bitfield.vec_disp8) in build_modrm_byte()
6408 i.types[op].bitfield.vec_disp8 = 1; in build_modrm_byte()
6410 i.types[op].bitfield.disp8 = 1; in build_modrm_byte()
6415 i.rm.regmem = i.index_reg->reg_num - 6 + 2; in build_modrm_byte()
6418 i.rm.regmem = i.base_reg->reg_num - 6 + 4; in build_modrm_byte()
6420 i.rm.mode = mode_from_disp_size (i.types[op]); in build_modrm_byte()
6425 && operand_type_check (i.types[op], disp)) in build_modrm_byte()
6429 temp.bitfield.disp8 = i.types[op].bitfield.disp8; in build_modrm_byte()
6431 = i.types[op].bitfield.vec_disp8; in build_modrm_byte()
6432 i.types[op] = temp; in build_modrm_byte()
6433 if (i.prefix[ADDR_PREFIX] == 0) in build_modrm_byte()
6434 i.types[op].bitfield.disp32s = 1; in build_modrm_byte()
6436 i.types[op].bitfield.disp32 = 1; in build_modrm_byte()
6439 if (!i.tm.opcode_modifier.vecsib) in build_modrm_byte()
6440 i.rm.regmem = i.base_reg->reg_num; in build_modrm_byte()
6441 if ((i.base_reg->reg_flags & RegRex) != 0) in build_modrm_byte()
6442 i.rex |= REX_B; in build_modrm_byte()
6443 i.sib.base = i.base_reg->reg_num; in build_modrm_byte()
6446 if (!(i.base_reg->reg_flags & RegRex) in build_modrm_byte()
6447 && (i.base_reg->reg_num == EBP_REG_NUM in build_modrm_byte()
6448 || i.base_reg->reg_num == ESP_REG_NUM)) in build_modrm_byte()
6450 if (i.base_reg->reg_num == 5 && i.disp_operands == 0) in build_modrm_byte()
6453 if (i.tm.operand_types [op].bitfield.vec_disp8) in build_modrm_byte()
6454 i.types[op].bitfield.vec_disp8 = 1; in build_modrm_byte()
6456 i.types[op].bitfield.disp8 = 1; in build_modrm_byte()
6458 i.sib.scale = i.log2_scale_factor; in build_modrm_byte()
6459 if (i.index_reg == 0) in build_modrm_byte()
6461 gas_assert (!i.tm.opcode_modifier.vecsib); in build_modrm_byte()
6467 i.sib.index = NO_INDEX_REGISTER; in build_modrm_byte()
6469 else if (!i.tm.opcode_modifier.vecsib) in build_modrm_byte()
6471 if (i.index_reg->reg_num == RegEiz in build_modrm_byte()
6472 || i.index_reg->reg_num == RegRiz) in build_modrm_byte()
6473 i.sib.index = NO_INDEX_REGISTER; in build_modrm_byte()
6475 i.sib.index = i.index_reg->reg_num; in build_modrm_byte()
6476 i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING; in build_modrm_byte()
6477 if ((i.index_reg->reg_flags & RegRex) != 0) in build_modrm_byte()
6478 i.rex |= REX_X; in build_modrm_byte()
6481 if (i.disp_operands in build_modrm_byte()
6482 && (i.reloc[op] == BFD_RELOC_386_TLS_DESC_CALL in build_modrm_byte()
6483 || i.reloc[op] == BFD_RELOC_X86_64_TLSDESC_CALL)) in build_modrm_byte()
6484 i.rm.mode = 0; in build_modrm_byte()
6488 && !i.disp_operands in build_modrm_byte()
6489 && i.disp_encoding) in build_modrm_byte()
6492 if (i.disp_encoding == disp_encoding_8bit) in build_modrm_byte()
6493 i.types[op].bitfield.disp8 = 1; in build_modrm_byte()
6495 i.types[op].bitfield.disp32 = 1; in build_modrm_byte()
6497 i.rm.mode = mode_from_disp_size (i.types[op]); in build_modrm_byte()
6507 gas_assert (i.op[op].disps == 0); in build_modrm_byte()
6508 exp = &disp_expressions[i.disp_operands++]; in build_modrm_byte()
6509 i.op[op].disps = exp; in build_modrm_byte()
6521 if (i.tm.opcode_modifier.vexsources == XOP2SOURCES) in build_modrm_byte()
6523 if (operand_type_check (i.types[0], imm)) in build_modrm_byte()
6524 i.vex.register_specifier = NULL; in build_modrm_byte()
6529 if (i.tm.opcode_modifier.vexw == VEXW0) in build_modrm_byte()
6530 i.vex.register_specifier = i.op[0].regs; in build_modrm_byte()
6532 i.vex.register_specifier = i.op[1].regs; in build_modrm_byte()
6537 i.rm.reg = i.op[2].regs->reg_num; in build_modrm_byte()
6538 if ((i.op[2].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6539 i.rex |= REX_R; in build_modrm_byte()
6542 if (!i.mem_operands) in build_modrm_byte()
6544 i.rm.mode = 3; in build_modrm_byte()
6546 if (i.tm.opcode_modifier.vexw == VEXW0) in build_modrm_byte()
6547 i.rm.regmem = i.op[1].regs->reg_num; in build_modrm_byte()
6549 i.rm.regmem = i.op[0].regs->reg_num; in build_modrm_byte()
6551 if ((i.op[1].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6552 i.rex |= REX_B; in build_modrm_byte()
6555 else if (i.tm.opcode_modifier.vexvvvv == VEXLWP) in build_modrm_byte()
6557 i.vex.register_specifier = i.op[2].regs; in build_modrm_byte()
6558 if (!i.mem_operands) in build_modrm_byte()
6560 i.rm.mode = 3; in build_modrm_byte()
6561 i.rm.regmem = i.op[1].regs->reg_num; in build_modrm_byte()
6562 if ((i.op[1].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6563 i.rex |= REX_B; in build_modrm_byte()
6570 else if (i.reg_operands) in build_modrm_byte()
6575 for (op = 0; op < i.operands; op++) in build_modrm_byte()
6576 if (i.types[op].bitfield.reg8 in build_modrm_byte()
6577 || i.types[op].bitfield.reg16 in build_modrm_byte()
6578 || i.types[op].bitfield.reg32 in build_modrm_byte()
6579 || i.types[op].bitfield.reg64 in build_modrm_byte()
6580 || i.types[op].bitfield.regmmx in build_modrm_byte()
6581 || i.types[op].bitfield.regxmm in build_modrm_byte()
6582 || i.types[op].bitfield.regymm in build_modrm_byte()
6583 || i.types[op].bitfield.regbnd in build_modrm_byte()
6584 || i.types[op].bitfield.regzmm in build_modrm_byte()
6585 || i.types[op].bitfield.regmask in build_modrm_byte()
6586 || i.types[op].bitfield.sreg2 in build_modrm_byte()
6587 || i.types[op].bitfield.sreg3 in build_modrm_byte()
6588 || i.types[op].bitfield.control in build_modrm_byte()
6589 || i.types[op].bitfield.debug in build_modrm_byte()
6590 || i.types[op].bitfield.test) in build_modrm_byte()
6595 else if (i.tm.opcode_modifier.vexvvvv == VEXXDS) in build_modrm_byte()
6604 gas_assert (op < i.operands); in build_modrm_byte()
6610 if (!i.tm.operand_types[op].bitfield.baseindex in build_modrm_byte()
6611 && i.tm.operand_types[op + 1].bitfield.baseindex) in build_modrm_byte()
6616 && op < i.operands); in build_modrm_byte()
6621 gas_assert (vex_reg < i.operands); in build_modrm_byte()
6625 else if (i.tm.opcode_modifier.vexvvvv == VEXNDD) in build_modrm_byte()
6629 if (i.mem_operands == 0) in build_modrm_byte()
6632 gas_assert ((op + 2) == i.operands); in build_modrm_byte()
6638 gas_assert (op < 2 && i.operands == 2); in build_modrm_byte()
6643 gas_assert (op < i.operands); in build_modrm_byte()
6647 i386_operand_type *type = &i.tm.operand_types[vex_reg]; in build_modrm_byte()
6657 i.vex.register_specifier = i.op[vex_reg].regs; in build_modrm_byte()
6665 if (i.tm.extension_opcode != None) in build_modrm_byte()
6667 i.rm.regmem = i.op[op].regs->reg_num; in build_modrm_byte()
6668 if ((i.op[op].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6669 i.rex |= REX_B; in build_modrm_byte()
6670 if ((i.op[op].regs->reg_flags & RegVRex) != 0) in build_modrm_byte()
6671 i.vrex |= REX_B; in build_modrm_byte()
6675 i.rm.reg = i.op[op].regs->reg_num; in build_modrm_byte()
6676 if ((i.op[op].regs->reg_flags & RegRex) != 0) in build_modrm_byte()
6677 i.rex |= REX_R; in build_modrm_byte()
6678 if ((i.op[op].regs->reg_flags & RegVRex) != 0) in build_modrm_byte()
6679 i.vrex |= REX_R; in build_modrm_byte()
6686 if (!i.mem_operands) in build_modrm_byte()
6687 i.rm.mode = 3; in build_modrm_byte()
6691 if (i.tm.extension_opcode != None) in build_modrm_byte()
6692 i.rm.reg = i.tm.extension_opcode; in build_modrm_byte()
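
Note: the build_modrm_byte() entries above around source lines 6390-6418 derive the 16-bit r/m field from the base/index pair with small offsets such as reg_num - 6 + 2. As a reference for those offsets, here is a minimal stand-alone sketch (not code from gas) of the classic 16-bit ModRM r/m table they implement; the register numbers BX=3, BP=5, SI=6, DI=7 are assumptions matching the switch cases in the listing.

/* Stand-alone illustration of the 16-bit ModRM r/m encoding.
   r/m 0..7 mean: [BX+SI] [BX+DI] [BP+SI] [BP+DI] [SI] [DI] [BP/disp16] [BX].
   Note r/m 6 with mod 00 means a bare disp16, which is why the listed
   code fakes a zero disp8 for a plain (%bp) operand.  */
#include <assert.h>
#include <stdio.h>

enum { REG_BX = 3, REG_BP = 5, REG_SI = 6, REG_DI = 7 };

static int rm16 (int base, int has_index, int index)
{
  if (base == REG_BX)
    return has_index ? index - 6 : 7;        /* (%bx,%si)=0, (%bx,%di)=1, (%bx)=7 */
  if (base == REG_BP)
    return has_index ? index - 6 + 2 : 6;    /* (%bp,%si)=2, (%bp,%di)=3, (%bp)=6 */
  return base - 6 + 4;                       /* (%si)=4, (%di)=5 */
}

int main (void)
{
  assert (rm16 (REG_BX, 1, REG_SI) == 0);
  assert (rm16 (REG_BP, 1, REG_DI) == 3);
  assert (rm16 (REG_DI, 0, 0) == 5);
  assert (rm16 (REG_BX, 0, 0) == 7);
  printf ("16-bit r/m table checks passed\n");
  return 0;
}
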
6709 size = i.disp_encoding == disp_encoding_32bit ? BIG : SMALL; in output_branch()
6712 if (i.prefix[DATA_PREFIX] != 0) in output_branch()
6715 i.prefixes -= 1; in output_branch()
6719 if (i.prefix[SEG_PREFIX] == CS_PREFIX_OPCODE /* not taken */ in output_branch()
6720 || i.prefix[SEG_PREFIX] == DS_PREFIX_OPCODE /* taken */) in output_branch()
6723 i.prefixes--; in output_branch()
6725 if (i.prefix[REX_PREFIX] != 0) in output_branch()
6728 i.prefixes--; in output_branch()
6732 if (i.prefix[BND_PREFIX] != 0) in output_branch()
6734 FRAG_APPEND_1_CHAR (i.prefix[BND_PREFIX]); in output_branch()
6735 i.prefixes -= 1; in output_branch()
6738 if (i.prefixes != 0 && !intel_syntax) in output_branch()
6749 if (i.prefix[DATA_PREFIX] != 0) in output_branch()
6751 if (i.prefix[SEG_PREFIX] == CS_PREFIX_OPCODE in output_branch()
6752 || i.prefix[SEG_PREFIX] == DS_PREFIX_OPCODE) in output_branch()
6753 *p++ = i.prefix[SEG_PREFIX]; in output_branch()
6754 if (i.prefix[REX_PREFIX] != 0) in output_branch()
6755 *p++ = i.prefix[REX_PREFIX]; in output_branch()
6756 *p = i.tm.base_opcode; in output_branch()
6766 sym = i.op[0].disps->X_add_symbol; in output_branch()
6767 off = i.op[0].disps->X_add_number; in output_branch()
6769 if (i.op[0].disps->X_op != O_constant in output_branch()
6770 && i.op[0].disps->X_op != O_symbol) in output_branch()
6773 sym = make_expr_symbol (i.op[0].disps); in output_branch()
6781 || i.reloc[0] != NO_RELOC in output_branch()
6782 || (i.bnd_prefix == NULL && !add_bnd_prefix)) in output_branch()
6783 ? i.reloc[0] in output_branch()
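
Note: the output_branch() entries above treat a CS segment override (0x2e) as a "branch not taken" hint and a DS override (0x3e) as a "branch taken" hint, emitting the byte ahead of the jump opcode. The following sketch only illustrates that byte layout and is not taken from gas; the opcode bytes are an assumed example.

/* Illustrative branch-hint prefix in front of a conditional jump.  */
#include <stdio.h>

#define CS_PREFIX_OPCODE 0x2e   /* hint: branch not taken */
#define DS_PREFIX_OPCODE 0x3e   /* hint: branch taken */

int main (void)
{
  unsigned char insn[3];
  unsigned int n = 0;

  insn[n++] = DS_PREFIX_OPCODE;  /* hint that the branch is usually taken */
  insn[n++] = 0x74;              /* je rel8 */
  insn[n++] = 0x05;              /* jump forward 5 bytes */

  for (unsigned int j = 0; j < n; j++)
    printf ("%02x ", insn[j]);
  printf ("\n");
  return 0;
}
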
6795 if (i.tm.opcode_modifier.jumpbyte) in output_jump()
6799 if (i.prefix[ADDR_PREFIX] != 0) in output_jump()
6802 i.prefixes -= 1; in output_jump()
6805 if (i.prefix[SEG_PREFIX] == CS_PREFIX_OPCODE /* not taken */ in output_jump()
6806 || i.prefix[SEG_PREFIX] == DS_PREFIX_OPCODE /* taken */) in output_jump()
6808 FRAG_APPEND_1_CHAR (i.prefix[SEG_PREFIX]); in output_jump()
6809 i.prefixes--; in output_jump()
6820 if (i.prefix[DATA_PREFIX] != 0) in output_jump()
6823 i.prefixes -= 1; in output_jump()
6832 if (i.prefix[REX_PREFIX] != 0) in output_jump()
6834 FRAG_APPEND_1_CHAR (i.prefix[REX_PREFIX]); in output_jump()
6835 i.prefixes -= 1; in output_jump()
6839 if (i.prefix[BND_PREFIX] != 0) in output_jump()
6841 FRAG_APPEND_1_CHAR (i.prefix[BND_PREFIX]); in output_jump()
6842 i.prefixes -= 1; in output_jump()
6845 if (i.prefixes != 0 && !intel_syntax) in output_jump()
6848 p = frag_more (i.tm.opcode_length + size); in output_jump()
6849 switch (i.tm.opcode_length) in output_jump()
6852 *p++ = i.tm.base_opcode >> 8; in output_jump()
6854 *p++ = i.tm.base_opcode; in output_jump()
6861 i.op[0].disps, 1, reloc (size, 1, 1, in output_jump()
6862 (i.bnd_prefix != NULL in output_jump()
6864 i.reloc[0])); in output_jump()
6886 if (i.prefix[DATA_PREFIX] != 0) in output_interseg_jump()
6889 i.prefixes -= 1; in output_interseg_jump()
6892 if (i.prefix[REX_PREFIX] != 0) in output_interseg_jump()
6895 i.prefixes -= 1; in output_interseg_jump()
6902 if (i.prefixes != 0 && !intel_syntax) in output_interseg_jump()
6908 if (i.prefix[DATA_PREFIX] != 0) in output_interseg_jump()
6911 if (i.prefix[REX_PREFIX] != 0) in output_interseg_jump()
6912 *p++ = i.prefix[REX_PREFIX]; in output_interseg_jump()
6914 *p++ = i.tm.base_opcode; in output_interseg_jump()
6915 if (i.op[1].imms->X_op == O_constant) in output_interseg_jump()
6917 offsetT n = i.op[1].imms->X_add_number; in output_interseg_jump()
6930 i.op[1].imms, 0, reloc (size, 0, 0, 0, i.reloc[1])); in output_interseg_jump()
6931 if (i.op[0].imms->X_op != O_constant) in output_interseg_jump()
6933 i.tm.name); in output_interseg_jump()
6934 md_number_to_chars (p + size, (valueT) i.op[0].imms->X_add_number, 2); in output_interseg_jump()
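
Note: the output_interseg_jump() entries above write the far-pointer operand as an offset of `size' bytes followed by a 2-byte segment selector (the md_number_to_chars call at p + size). The sketch below shows that ptr16:16 / ptr16:32 layout with an assumed direct far-jump opcode (0xea) and made-up offset/selector values; the little-endian helper is illustrative, not a gas routine.

/* Sketch of a direct far jump: opcode, offset, then 16-bit selector.  */
#include <stdio.h>

static void put_le (unsigned char *p, unsigned long v, int bytes)
{
  for (int j = 0; j < bytes; j++)
    p[j] = (v >> (8 * j)) & 0xff;
}

int main (void)
{
  unsigned char buf[1 + 4 + 2];
  int size = 4;                          /* 32-bit offset */

  buf[0] = 0xea;                         /* jmp ptr16:32 */
  put_le (buf + 1, 0x00401000, size);    /* offset, written first */
  put_le (buf + 1 + size, 0x0008, 2);    /* segment selector, written after */

  for (size_t j = 0; j < sizeof buf; j++)
    printf ("%02x ", buf[j]);
  printf ("\n");
  return 0;
}
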
6952 if (i.tm.opcode_modifier.jump) in output_insn()
6954 else if (i.tm.opcode_modifier.jumpbyte in output_insn()
6955 || i.tm.opcode_modifier.jumpdword) in output_insn()
6957 else if (i.tm.opcode_modifier.jumpintersegment) in output_insn()
6971 if (i.tm.base_opcode == LOCK_PREFIX_OPCODE) in output_insn()
6973 i.prefix[LOCK_PREFIX] = 0; in output_insn()
6978 if (!i.tm.opcode_modifier.vex && !i.tm.opcode_modifier.evex) in output_insn()
6980 switch (i.tm.opcode_length) in output_insn()
6983 if (i.tm.base_opcode & 0xff000000) in output_insn()
6985 prefix = (i.tm.base_opcode >> 24) & 0xff; in output_insn()
6990 if ((i.tm.base_opcode & 0xff0000) != 0) in output_insn()
6992 prefix = (i.tm.base_opcode >> 16) & 0xff; in output_insn()
6993 if (i.tm.cpu_flags.bitfield.cpupadlock) in output_insn()
6997 || (i.prefix[REP_PREFIX] in output_insn()
7016 && i.operands == 2 in output_insn()
7017 && i.reloc[0] == BFD_RELOC_X86_64_GOTTPOFF in output_insn()
7018 && i.prefix[REX_PREFIX] == 0) in output_insn()
7023 for (j = ARRAY_SIZE (i.prefix), q = i.prefix; j > 0; j--, q++) in output_insn()
7029 for (j = 0, q = i.prefix; j < ARRAY_SIZE (i.prefix); j++, q++) in output_insn()
7049 if (i.vrex) in output_insn()
7052 p = frag_more (i.vex.length); in output_insn()
7053 for (j = 0; j < i.vex.length; j++) in output_insn()
7054 p[j] = i.vex.bytes[j]; in output_insn()
7058 if (i.tm.opcode_length == 1) in output_insn()
7060 FRAG_APPEND_1_CHAR (i.tm.base_opcode); in output_insn()
7064 switch (i.tm.opcode_length) in output_insn()
7068 *p++ = (i.tm.base_opcode >> 24) & 0xff; in output_insn()
7069 *p++ = (i.tm.base_opcode >> 16) & 0xff; in output_insn()
7073 *p++ = (i.tm.base_opcode >> 16) & 0xff; in output_insn()
7084 *p++ = (i.tm.base_opcode >> 8) & 0xff; in output_insn()
7085 *p = i.tm.base_opcode & 0xff; in output_insn()
7089 if (i.tm.opcode_modifier.modrm) in output_insn()
7091 FRAG_APPEND_1_CHAR ((i.rm.regmem << 0 in output_insn()
7092 | i.rm.reg << 3 in output_insn()
7093 | i.rm.mode << 6)); in output_insn()
7098 if (i.rm.regmem == ESCAPE_TO_TWO_BYTE_ADDRESSING in output_insn()
7099 && i.rm.mode != 3 in output_insn()
7100 && !(i.base_reg && i.base_reg->reg_type.bitfield.reg16)) in output_insn()
7101 FRAG_APPEND_1_CHAR ((i.sib.base << 0 in output_insn()
7102 | i.sib.index << 3 in output_insn()
7103 | i.sib.scale << 6)); in output_insn()
7106 if (i.disp_operands) in output_insn()
7109 if (i.imm_operands) in output_insn()
7116 pi ("" /*line*/, &i); in output_insn()
7129 if (i.types[n].bitfield.vec_disp8) in disp_size()
7131 else if (i.types[n].bitfield.disp64) in disp_size()
7133 else if (i.types[n].bitfield.disp8) in disp_size()
7135 else if (i.types[n].bitfield.disp16) in disp_size()
7146 if (i.types[n].bitfield.imm64) in imm_size()
7148 else if (i.types[n].bitfield.imm8 || i.types[n].bitfield.imm8s) in imm_size()
7150 else if (i.types[n].bitfield.imm16) in imm_size()
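
Note: the disp_size() and imm_size() entries above select an operand width from the type bitfields (with vec_disp8 compressing a displacement to a single byte). The sketch below only illustrates that mapping, with 4 bytes as the default width; the struct and field names are illustrative, not the assembler's types.

/* Byte counts implied by displacement/immediate type bits.  */
#include <stdio.h>

struct optype
{
  unsigned disp8 : 1, disp16 : 1, disp64 : 1;
  unsigned imm8 : 1, imm8s : 1, imm16 : 1, imm64 : 1;
};

static int disp_bytes (struct optype t)
{
  if (t.disp64) return 8;
  if (t.disp8)  return 1;
  if (t.disp16) return 2;
  return 4;                      /* disp32 / disp32s */
}

static int imm_bytes (struct optype t)
{
  if (t.imm64)           return 8;
  if (t.imm8 || t.imm8s) return 1;
  if (t.imm16)           return 2;
  return 4;                      /* imm32 / imm32s */
}

int main (void)
{
  struct optype t = { 0 };
  t.disp8 = 1;
  t.imm16 = 1;
  printf ("disp=%d bytes, imm=%d bytes\n", disp_bytes (t), imm_bytes (t));
  return 0;
}
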
7161 for (n = 0; n < i.operands; n++) in output_disp()
7163 if (i.types[n].bitfield.vec_disp8 in output_disp()
7164 || operand_type_check (i.types[n], disp)) in output_disp()
7166 if (i.op[n].disps->X_op == O_constant) in output_disp()
7169 offsetT val = i.op[n].disps->X_add_number; in output_disp()
7171 if (i.types[n].bitfield.vec_disp8) in output_disp()
7172 val >>= i.memshift; in output_disp()
7181 int sign = i.types[n].bitfield.disp32s; in output_disp()
7182 int pcrel = (i.flags[n] & Operand_PCrel) != 0; in output_disp()
7185 gas_assert (!i.types[n].bitfield.disp8); in output_disp()
7190 if (pcrel && i.imm_operands) in output_disp()
7195 for (n1 = 0; n1 < i.operands; n1++) in output_disp()
7196 if (operand_type_check (i.types[n1], imm)) in output_disp()
7202 i.op[n].disps->X_add_number -= sz; in output_disp()
7210 (i.bnd_prefix != NULL in output_disp()
7212 i.reloc[n]); in output_disp()
7214 && GOT_symbol == i.op[n].disps->X_add_symbol in output_disp()
7219 && (i.op[n].disps->X_op == O_symbol in output_disp()
7220 || (i.op[n].disps->X_op == O_add in output_disp()
7222 (i.op[n].disps->X_op_symbol)->X_op) in output_disp()
7244 i.op[n].imms->X_add_number += add; in output_disp()
7255 i.op[n].disps, pcrel, reloc_type); in output_disp()
7267 for (n = 0; n < i.operands; n++) in output_imm()
7270 if (i.rounding && (int) n == i.rounding->operand) in output_imm()
7273 if (operand_type_check (i.types[n], imm)) in output_imm()
7275 if (i.op[n].imms->X_op == O_constant) in output_imm()
7280 val = offset_in_range (i.op[n].imms->X_add_number, in output_imm()
7295 if (i.types[n].bitfield.imm32s in output_imm()
7296 && (i.suffix == QWORD_MNEM_SUFFIX in output_imm()
7297 || (!i.suffix && i.tm.opcode_modifier.no_lsuf))) in output_imm()
7303 reloc_type = reloc (size, 0, sign, 0, i.reloc[n]); in output_imm()
7351 && GOT_symbol == i.op[n].imms->X_add_symbol in output_imm()
7352 && (i.op[n].imms->X_op == O_symbol in output_imm()
7353 || (i.op[n].imms->X_op == O_add in output_imm()
7355 (i.op[n].imms->X_op_symbol)->X_op) in output_imm()
7379 i.op[n].imms->X_add_number += add; in output_imm()
7382 i.op[n].imms, 0, reloc_type); in output_imm()
7799 if (i.broadcast) in check_VecOperations()
7824 i.broadcast = &broadcast_op; in check_VecOperations()
7837 if (!i.mask) in check_VecOperations()
7842 i.mask = &mask_op; in check_VecOperations()
7846 if (i.mask->mask) in check_VecOperations()
7849 i.mask->mask = mask; in check_VecOperations()
7853 if (i.mask->operand != this_operand) in check_VecOperations()
7865 if (!i.mask) in check_VecOperations()
7870 i.mask = &mask_op; in check_VecOperations()
7874 if (i.mask->zeroing) in check_VecOperations()
7881 i.mask->zeroing = 1; in check_VecOperations()
7885 if (i.mask->operand != this_operand) in check_VecOperations()
7926 if (i.imm_operands == MAX_IMMEDIATE_OPERANDS) in i386_immediate()
7933 exp = &im_expressions[i.imm_operands++]; in i386_immediate()
7934 i.op[this_operand].imms = exp; in i386_immediate()
7942 gotfree_input_line = lex_got (&i.reloc[this_operand], NULL, &types, in i386_immediate()
7943 (i.bnd_prefix != NULL in i386_immediate()
7990 i.types[this_operand].bitfield.imm64 = 1; in i386_finalize_immediate()
8021 i.types[this_operand].bitfield.imm8 = 1; in i386_finalize_immediate()
8022 i.types[this_operand].bitfield.imm16 = 1; in i386_finalize_immediate()
8023 i.types[this_operand].bitfield.imm32 = 1; in i386_finalize_immediate()
8024 i.types[this_operand].bitfield.imm32s = 1; in i386_finalize_immediate()
8025 i.types[this_operand].bitfield.imm64 = 1; in i386_finalize_immediate()
8026 i.types[this_operand] = operand_type_and (i.types[this_operand], in i386_finalize_immediate()
8045 i.log2_scale_factor = 0; in i386_scale()
8048 i.log2_scale_factor = 1; in i386_scale()
8051 i.log2_scale_factor = 2; in i386_scale()
8054 i.log2_scale_factor = 3; in i386_scale()
8068 if (i.log2_scale_factor != 0 && i.index_reg == 0) in i386_scale()
8071 1 << i.log2_scale_factor); in i386_scale()
8072 i.log2_scale_factor = 0; in i386_scale()
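
Note: the i386_scale() entries above store the written scale factor 1/2/4/8 as its base-2 logarithm (0..3), the value the SIB scale field holds, and warn when a scale is given without an index register. A minimal stand-alone mapping, not taken from gas:

/* Scale factor to SIB scale field (log2).  */
#include <stdio.h>

static int log2_scale (int scale)
{
  switch (scale)
    {
    case 1: return 0;
    case 2: return 1;
    case 4: return 2;
    case 8: return 3;
    default: return -1;          /* invalid: expecting 1, 2, 4 or 8 */
    }
}

int main (void)
{
  for (int s = 1; s <= 8; s *= 2)
    printf ("scale %d -> log2 %d\n", s, log2_scale (s));
  return 0;
}
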
8090 if (i.disp_operands == MAX_MEMORY_OPERANDS) in i386_displacement()
8098 if ((i.types[this_operand].bitfield.jumpabsolute) in i386_displacement()
8103 override = (i.prefix[ADDR_PREFIX] != 0); in i386_displacement()
8122 override = (i.prefix[DATA_PREFIX] != 0); in i386_displacement()
8125 if (override || i.suffix == WORD_MNEM_SUFFIX) in i386_displacement()
8136 override = (i.suffix == (flag_code != CODE_16BIT in i386_displacement()
8147 i.types[this_operand] = operand_type_or (i.types[this_operand], in i386_displacement()
8150 exp = &disp_expressions[i.disp_operands]; in i386_displacement()
8151 i.op[this_operand].disps = exp; in i386_displacement()
8152 i.disp_operands++; in i386_displacement()
8162 if (i.types[this_operand].bitfield.baseindex in i386_displacement()
8201 gotfree_input_line = lex_got (&i.reloc[this_operand], NULL, &types, in i386_displacement()
8202 (i.bnd_prefix != NULL in i386_displacement()
8241 if (i.reloc[this_operand] == BFD_RELOC_386_GOTOFF in i386_finalize_displacement()
8242 || i.reloc[this_operand] == BFD_RELOC_X86_64_GOTPCREL in i386_finalize_displacement()
8243 || i.reloc[this_operand] == BFD_RELOC_X86_64_GOTOFF64) in i386_finalize_displacement()
8254 if (i.reloc[this_operand] == BFD_RELOC_X86_64_GOTPCREL) in i386_finalize_displacement()
8255 i.reloc[this_operand] = BFD_RELOC_32_PCREL; in i386_finalize_displacement()
8256 else if (i.reloc[this_operand] == BFD_RELOC_X86_64_GOTOFF64) in i386_finalize_displacement()
8257 i.reloc[this_operand] = BFD_RELOC_64; in i386_finalize_displacement()
8259 i.reloc[this_operand] = BFD_RELOC_32; in i386_finalize_displacement()
8273 && !i.prefix[ADDR_PREFIX] in i386_finalize_displacement()
8278 i.types[this_operand].bitfield.disp32 = 0; in i386_finalize_displacement()
8281 i.types[this_operand].bitfield.disp32s = 0; in i386_finalize_displacement()
8282 if (i.types[this_operand].bitfield.baseindex) in i386_finalize_displacement()
8307 bigdisp = i.types[this_operand]; in i386_finalize_displacement()
8314 i.types[this_operand] = operand_type_and (i.types[this_operand], in i386_finalize_displacement()
8329 if (i.prefix[ADDR_PREFIX]) in i386_index_check()
8336 if (i.mem_operands == 0) in i386_index_check()
8339 const reg_entry *addr_reg = i.base_reg; in i386_index_check()
8342 addr_reg = i.index_reg; in i386_index_check()
8356 i.prefix[ADDR_PREFIX] = ADDR_PREFIX_OPCODE; in i386_index_check()
8357 i.prefixes += 1; in i386_index_check()
8365 && (i.types[this_operand].bitfield.disp16 in i386_index_check()
8366 || i.types[this_operand].bitfield.disp32)) in i386_index_check()
8367 i.types[this_operand] in i386_index_check()
8368 = operand_type_xor (i.types[this_operand], disp16_32); in i386_index_check()
8378 || i.mem_operands)) in i386_index_check()
8398 || ((!i.mem_operands != !intel_syntax) in i386_index_check()
8409 if (i.base_reg != expected_reg in i386_index_check()
8410 || i.index_reg in i386_index_check()
8411 || operand_type_check (i.types[this_operand], disp)) in i386_index_check()
8415 if (i.mem_operands in i386_index_check()
8416 && i.base_reg in i386_index_check()
8418 && i.base_reg->reg_type.bitfield.reg64) in i386_index_check()
8420 ? i.base_reg->reg_type.bitfield.reg32 in i386_index_check()
8421 : i.base_reg->reg_type.bitfield.reg16))) in i386_index_check()
8445 if ((i.base_reg in i386_index_check()
8447 ? !i.base_reg->reg_type.bitfield.reg64 in i386_index_check()
8448 : !i.base_reg->reg_type.bitfield.reg32) in i386_index_check()
8449 && (i.index_reg in i386_index_check()
8450 || (i.base_reg->reg_num in i386_index_check()
8452 || (i.index_reg in i386_index_check()
8453 && !i.index_reg->reg_type.bitfield.regxmm in i386_index_check()
8454 && !i.index_reg->reg_type.bitfield.regymm in i386_index_check()
8455 && !i.index_reg->reg_type.bitfield.regzmm in i386_index_check()
8457 ? !(i.index_reg->reg_type.bitfield.reg64 in i386_index_check()
8458 || i.index_reg->reg_num == RegRiz) in i386_index_check()
8459 : !(i.index_reg->reg_type.bitfield.reg32 in i386_index_check()
8460 || i.index_reg->reg_num == RegEiz)) in i386_index_check()
8461 || !i.index_reg->reg_type.bitfield.baseindex))) in i386_index_check()
8467 if ((i.base_reg in i386_index_check()
8468 && (!i.base_reg->reg_type.bitfield.reg16 in i386_index_check()
8469 || !i.base_reg->reg_type.bitfield.baseindex)) in i386_index_check()
8470 || (i.index_reg in i386_index_check()
8471 && (!i.index_reg->reg_type.bitfield.reg16 in i386_index_check()
8472 || !i.index_reg->reg_type.bitfield.baseindex in i386_index_check()
8473 || !(i.base_reg in i386_index_check()
8474 && i.base_reg->reg_num < 6 in i386_index_check()
8475 && i.index_reg->reg_num >= 6 in i386_index_check()
8476 && i.log2_scale_factor == 0)))) in i386_index_check()
8501 if (!i.rounding) in RC_SAE_immediate()
8505 i.rounding = &rc_op; in RC_SAE_immediate()
8532 exp = &im_expressions[i.imm_operands++]; in RC_SAE_immediate()
8533 i.op[this_operand].imms = exp; in RC_SAE_immediate()
8540 i.types[this_operand].bitfield.imm8 = 1; in RC_SAE_immediate()
8564 i.types[this_operand].bitfield.jumpabsolute = 1; in i386_att_operand()
8584 i.seg[i.mem_operands] = &es; in i386_att_operand()
8587 i.seg[i.mem_operands] = &cs; in i386_att_operand()
8590 i.seg[i.mem_operands] = &ss; in i386_att_operand()
8593 i.seg[i.mem_operands] = &ds; in i386_att_operand()
8596 i.seg[i.mem_operands] = &fs; in i386_att_operand()
8599 i.seg[i.mem_operands] = &gs; in i386_att_operand()
8622 i.types[this_operand].bitfield.jumpabsolute = 1; in i386_att_operand()
8642 i.types[this_operand] = operand_type_or (i.types[this_operand], in i386_att_operand()
8644 i.types[this_operand].bitfield.unspecified = 0; in i386_att_operand()
8645 i.op[this_operand].regs = r; in i386_att_operand()
8646 i.reg_operands++; in i386_att_operand()
8656 if (i.types[this_operand].bitfield.jumpabsolute) in i386_att_operand()
8682 if ((i.mem_operands == 1 in i386_att_operand()
8684 || i.mem_operands == 2) in i386_att_operand()
8738 || ((i.base_reg = parse_register (base_string, &end_op)) in i386_att_operand()
8743 i.types[this_operand].bitfield.baseindex = 1; in i386_att_operand()
8745 if (i.base_reg) in i386_att_operand()
8759 if ((i.index_reg = parse_register (base_string, &end_op)) in i386_att_operand()
8807 else if (!i.index_reg) in i386_att_operand()
8844 if (i.base_reg in i386_att_operand()
8845 && operand_type_equal (&i.base_reg->reg_type, in i386_att_operand()
8847 && i.index_reg == 0 in i386_att_operand()
8848 && i.log2_scale_factor == 0 in i386_att_operand()
8849 && i.seg[i.mem_operands] == 0 in i386_att_operand()
8850 && !operand_type_check (i.types[this_operand], disp)) in i386_att_operand()
8852 i.types[this_operand] = inoutportreg; in i386_att_operand()
8858 i.types[this_operand].bitfield.mem = 1; in i386_att_operand()
8859 i.mem_operands++; in i386_att_operand()
9441 i.need_vrex = 1; in parse_real_register()
9486 i.need_vrex = 1; in parse_register()