Lines Matching refs:md

66 #define PREV_SLOT	md.slot[(md.curr_slot + NUM_SLOTS - 1) % NUM_SLOTS]
67 #define CURR_SLOT md.slot[md.curr_slot]
341 md; variable
932 md.in.base = REG_GR + 32; in set_regstack()
933 md.loc.base = md.in.base + ins; in set_regstack()
934 md.out.base = md.loc.base + locs; in set_regstack()
936 md.in.num_regs = ins; in set_regstack()
937 md.loc.num_regs = locs; in set_regstack()
938 md.out.num_regs = outs; in set_regstack()
939 md.rot.num_regs = rots; in set_regstack()
952 if (!md.last_text_seg) in ia64_flush_insns()
958 subseg_set (md.last_text_seg, 0); in ia64_flush_insns()
960 while (md.num_slots_in_use > 0) in ia64_flush_insns()
1024 if (md.qp.X_op == O_register) in ia64_flush_insns()
1041 if (md.auto_align) in ia64_cons_align()
2963 pad = len % md.pointer_size; in ia64_estimate_size_before_relax()
2965 len += md.pointer_size - pad; in ia64_estimate_size_before_relax()
2970 size += md.pointer_size; in ia64_estimate_size_before_relax()
2994 pad = len % md.pointer_size; in ia64_convert_frag()
2996 len += md.pointer_size - pad; in ia64_convert_frag()
3001 size += md.pointer_size; in ia64_convert_frag()
3011 if (md.flags & EF_IA_64_ABI64) in ia64_convert_frag()
3023 | (len / md.pointer_size)), /* Length. */ in ia64_convert_frag()
3032 md_number_to_chars (frag->fr_literal + len + 8 - md.pointer_size + pad, 0, in ia64_convert_frag()
3033 md.pointer_size - pad); in ia64_convert_frag()
3036 md_number_to_chars (frag->fr_literal + size - md.pointer_size, 0, in ia64_convert_frag()
3037 md.pointer_size); in ia64_convert_frag()
3202 if (md.unwind_check == unwind_check_warning) in unwind_diagnostic()
3665 pad = size % md.pointer_size; in generate_unwind_image()
3667 size += md.pointer_size - pad; in generate_unwind_image()
3672 size += md.pointer_size; in generate_unwind_image()
3685 frag_align (md.pointer_size_shift, 0, 0); in generate_unwind_image()
3686 record_alignment (now_seg, md.pointer_size_shift); in generate_unwind_image()
3702 if (md.flags & EF_IA_64_BE) in generate_unwind_image()
3704 if (md.flags & EF_IA_64_ABI64) in generate_unwind_image()
3711 if (md.flags & EF_IA_64_ABI64) in generate_unwind_image()
3717 fix_new_exp (frag_now, frag_now_fix () - md.pointer_size, in generate_unwind_image()
3718 md.pointer_size, &exp, 0, reloc); in generate_unwind_image()
4257 (md.unwind_check == unwind_check_warning in dot_proc()
4375 if (md.unwind_check == unwind_check_error) in dot_prologue()
4414 int unwind_check = md.unwind_check; in dot_endp()
4416 md.unwind_check = unwind_check_error; in dot_endp()
4419 md.unwind_check = unwind_check; in dot_endp()
4443 subseg_set (md.last_text_seg, 0); in dot_endp()
4450 record_alignment (now_seg, md.pointer_size_shift); in dot_endp()
4454 memset (frag_more (3 * md.pointer_size), 0, 3 * md.pointer_size); in dot_endp()
4455 where = frag_now_fix () - (3 * md.pointer_size); in dot_endp()
4536 (md.unwind_check == unwind_check_warning in dot_endp()
4631 for (dr = md.dynreg[type]; dr && dr->num_regs; dr = dr->next) in dot_rot()
4633 hash_delete (md.dynreg_hash, dr->name, FALSE); in dot_rot()
4638 drpp = &md.dynreg[type]; in dot_rot()
4672 if (num_alloced > md.rot.num_regs) in dot_rot()
4675 md.rot.num_regs); in dot_rot()
4715 if (hash_insert (md.dynreg_hash, name, dr)) in dot_rot()
4775 md.flags &= ~EF_IA_64_BE; in dot_psr()
4777 md.flags |= EF_IA_64_BE; in dot_psr()
4779 md.flags &= ~EF_IA_64_ABI64; in dot_psr()
4781 md.flags |= EF_IA_64_ABI64; in dot_psr()
4848 md.keep_pending_output = 1; in cross_section()
4854 saved_auto_align = md.auto_align; in cross_section()
4856 md.auto_align = 0; in cross_section()
4859 md.auto_align = saved_auto_align; in cross_section()
4861 md.keep_pending_output = 0; in cross_section()
4900 int saved_auto_align = md.auto_align; in stmt_cons_ua()
4902 md.auto_align = 0; in stmt_cons_ua()
4904 md.auto_align = saved_auto_align; in stmt_cons_ua()
4959 gr_values[regno - REG_GR].path = md.path; in dot_reg_val()
4992 if (md.manual_bundling) in dot_dv_mode()
4996 md.mode_explicitly_set = 0; in dot_dv_mode()
4998 md.mode_explicitly_set = 1; in dot_dv_mode()
5000 md.detect_dv = 1; in dot_dv_mode()
5005 if (md.explicit_mode) in dot_dv_mode()
5007 md.explicit_mode = 0; in dot_dv_mode()
5011 if (!md.explicit_mode) in dot_dv_mode()
5013 md.explicit_mode = 1; in dot_dv_mode()
5017 if (md.explicit_mode != md.default_explicit_mode) in dot_dv_mode()
5019 md.explicit_mode = md.default_explicit_mode; in dot_dv_mode()
5020 md.mode_explicitly_set = 0; in dot_dv_mode()
5192 if (md.debug_dv) in dot_pred_rel()
5222 err = hash_insert (md.entry_hash, S_GET_NAME (symbolP), (void *) symbolP); in dot_entry()
5249 md.mem_offset.hint = 1; in dot_mem_offset()
5250 md.mem_offset.offset = get_absolute_expression (); in dot_mem_offset()
5258 md.mem_offset.base = get_absolute_expression (); in dot_mem_offset()
5429 err = hash_insert (md.reg_hash, S_GET_NAME (sym), (void *) sym); in declare_register()
6575 first = (md.curr_slot + NUM_SLOTS - md.num_slots_in_use) % NUM_SLOTS; in emit_one_bundle()
6577 n = MIN (3, md.num_slots_in_use); in emit_one_bundle()
6582 if (md.slot[first].user_template >= 0) in emit_one_bundle()
6583 user_template = template_val = md.slot[first].user_template; in emit_one_bundle()
6591 if (md.slot[curr].label_fixups && i != 0) in emit_one_bundle()
6593 type[i] = md.slot[curr].idesc->type; in emit_one_bundle()
6615 idesc = md.slot[curr].idesc; in emit_one_bundle()
6618 for (i = 0; i < 3 && md.num_slots_in_use > 0; ++i) in emit_one_bundle()
6621 unw_rec_list *ptr = md.slot[curr].unwind_record; in emit_one_bundle()
6635 for (j = 1; end_ptr == NULL && j < md.num_slots_in_use; ++j) in emit_one_bundle()
6636 end_ptr = md.slot[(curr + j) % NUM_SLOTS].unwind_record; in emit_one_bundle()
6646 for (ptr = md.slot[curr].unwind_record; ptr != last_ptr; in emit_one_bundle()
6654 md.slot[curr].unwind_record = last_ptr; in emit_one_bundle()
6658 manual_bundling_off = md.slot[curr].manual_bundling_off; in emit_one_bundle()
6659 if (md.slot[curr].manual_bundling_on) in emit_one_bundle()
6669 if (curr != first && md.slot[curr].user_template >= 0) in emit_one_bundle()
6676 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
6708 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
6721 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
6743 md.slot[curr].end_of_insn_group = 0; in emit_one_bundle()
6747 if (curr != first && md.slot[curr].label_fixups) in emit_one_bundle()
6751 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
6759 if (end_of_insn_group && md.num_slots_in_use >= 1) in emit_one_bundle()
6824 switch (md.hint_b) in emit_one_bundle()
6870 md.slot[curr].idesc = idesc; in emit_one_bundle()
6896 for (lfix = md.slot[curr].label_fixups; lfix; lfix = lfix->next) in emit_one_bundle()
6902 for (lfix = md.slot[curr].tag_fixups; lfix; lfix = lfix->next) in emit_one_bundle()
6909 || md.slot[curr].loc_directive_seen in emit_one_bundle()
6914 md.slot[curr].loc_directive_seen = 0; in emit_one_bundle()
6916 md.slot[curr].debug_line.flags |= DWARF2_FLAG_BASIC_BLOCK; in emit_one_bundle()
6918 dwarf2_gen_line_info (addr, &md.slot[curr].debug_line); in emit_one_bundle()
6921 build_insn (md.slot + curr, insn + i); in emit_one_bundle()
6923 ptr = md.slot[curr].unwind_record; in emit_one_bundle()
6934 md.slot[curr].unwind_record = NULL; in emit_one_bundle()
6937 for (j = 0; j < md.slot[curr].num_fixups; ++j) in emit_one_bundle()
6939 ifix = md.slot[curr].fixup + j; in emit_one_bundle()
6943 fix->fx_file = md.slot[curr].src_file; in emit_one_bundle()
6944 fix->fx_line = md.slot[curr].src_line; in emit_one_bundle()
6947 end_of_insn_group = md.slot[curr].end_of_insn_group; in emit_one_bundle()
6957 --md.num_slots_in_use; in emit_one_bundle()
6961 ia64_free_opcode (md.slot[curr].idesc); in emit_one_bundle()
6962 memset (md.slot + curr, 0, sizeof (md.slot[curr])); in emit_one_bundle()
6963 md.slot[curr].user_template = -1; in emit_one_bundle()
6971 idesc = md.slot[curr].idesc; in emit_one_bundle()
6976 if (md.num_slots_in_use > 0 && last_slot < 0) in emit_one_bundle()
6978 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
6982 --md.num_slots_in_use; in emit_one_bundle()
6984 ia64_free_opcode (md.slot[curr].idesc); in emit_one_bundle()
6985 memset (md.slot + curr, 0, sizeof (md.slot[curr])); in emit_one_bundle()
6986 md.slot[curr].user_template = -1; in emit_one_bundle()
6990 if (md.num_slots_in_use > 0) in emit_one_bundle()
6993 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
7005 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
7011 as_bad_where (md.slot[curr].src_file, md.slot[curr].src_line, in emit_one_bundle()
7015 know (md.num_slots_in_use < NUM_SLOTS); in emit_one_bundle()
7036 md.flags |= EF_IA_64_ABI64; in md_parse_option()
7040 md.flags &= ~EF_IA_64_ABI64; in md_parse_option()
7044 md.flags &= ~EF_IA_64_BE; in md_parse_option()
7049 md.flags |= EF_IA_64_BE; in md_parse_option()
7056 md.unwind_check = unwind_check_warning; in md_parse_option()
7058 md.unwind_check = unwind_check_error; in md_parse_option()
7066 md.hint_b = hint_b_ok; in md_parse_option()
7068 md.hint_b = hint_b_warning; in md_parse_option()
7070 md.hint_b = hint_b_error; in md_parse_option()
7078 md.tune = itanium1; in md_parse_option()
7080 md.tune = itanium2; in md_parse_option()
7128 md.detect_dv = 1; in md_parse_option()
7132 md.default_explicit_mode = 1; in md_parse_option()
7137 md.default_explicit_mode = 0; in md_parse_option()
7141 md.detect_dv = 0; in md_parse_option()
7145 md.debug_dv = 1; in md_parse_option()
7149 md.default_explicit_mode = 1; in md_parse_option()
7150 md.debug_dv = 1; in md_parse_option()
7154 md.debug_dv = 1; in md_parse_option()
7155 md.detect_dv = 0; in md_parse_option()
7169 md.flags |= EF_IA_64_CONS_GP; in md_parse_option()
7173 md.flags |= EF_IA_64_NOFUNCDESC_CONS_GP; in md_parse_option()
7253 switch (md.tune) in extra_goodness()
7289 md.auto_align = 1; in md_begin()
7290 md.explicit_mode = md.default_explicit_mode; in md_begin()
7377 if (md.tune != itanium1) in md_begin()
7448 md.slot[i].user_template = -1; in md_begin()
7450 md.pseudo_hash = hash_new (); in md_begin()
7453 err = hash_insert (md.pseudo_hash, pseudo_opcode[i].name, in md_begin()
7460 md.reg_hash = hash_new (); in md_begin()
7461 md.dynreg_hash = hash_new (); in md_begin()
7462 md.const_hash = hash_new (); in md_begin()
7463 md.entry_hash = hash_new (); in md_begin()
7509 md.indregsym[regnum - IND_CPUID] = declare_register (indirect_reg[i].name, regnum); in md_begin()
7517 err = hash_insert (md.const_hash, const_bits[i].name, in md_begin()
7526 if (md.flags & EF_IA_64_ABI64) in md_begin()
7536 if (md.flags & EF_IA_64_ABI64) in md_begin()
7538 md.pointer_size = 8; /* pointers are 8 bytes */ in md_begin()
7539 md.pointer_size_shift = 3;	/* alignment is 8 bytes = 2^3 */ in md_begin()
7543 md.pointer_size = 4; /* pointers are 4 bytes */ in md_begin()
7544 md.pointer_size_shift = 2; /* alignment is 4 bytes = 2^2 */ in md_begin()
7547 md.mem_offset.hint = 0; in md_begin()
7548 md.path = 0; in md_begin()
7549 md.maxpaths = 0; in md_begin()
7550 md.entry_labels = NULL; in md_begin()
7560 md.flags = MD_FLAGS_DEFAULT; in ia64_init()
7563 md.detect_dv = 1; in ia64_init()
7566 md.unwind_check = unwind_check_warning; in ia64_init()
7567 md.hint_b = hint_b_error; in ia64_init()
7568 md.tune = itanium2; in ia64_init()
7578 if (md.flags & EF_IA_64_BE) in ia64_target_format()
7580 if (md.flags & EF_IA_64_ABI64) in ia64_target_format()
7599 if (md.flags & EF_IA_64_ABI64) in ia64_target_format()
7604 md.flags |= EF_IA_64_ARCHVER_1; in ia64_target_format()
7631 bfd_set_private_flags (stdoutput, md.flags); in ia64_end_of_source()
7633 md.mem_offset.hint = 0; in ia64_end_of_source()
7648 if (md.qp.X_op == O_register) in ia64_start_line()
7650 md.qp.X_op = O_absent; in ia64_start_line()
7657 if (md.detect_dv && !md.explicit_mode) in ia64_start_line()
7672 if (md.manual_bundling) in ia64_start_line()
7676 md.manual_bundling = 1; in ia64_start_line()
7680 if (md.detect_dv && !md.explicit_mode) in ia64_start_line()
7682 if (!md.mode_explicitly_set in ia64_start_line()
7683 && !md.default_explicit_mode) in ia64_start_line()
7691 if (!md.manual_bundling) in ia64_start_line()
7695 md.manual_bundling = 0; in ia64_start_line()
7698 if (md.detect_dv in ia64_start_line()
7699 && md.explicit_mode in ia64_start_line()
7700 && !md.mode_explicitly_set in ia64_start_line()
7701 && !md.default_explicit_mode) in ia64_start_line()
7716 expression_and_evaluate (&md.qp); in ia64_unrecognized_line()
7722 if (md.qp.X_op != O_register) in ia64_unrecognized_line()
7727 if (md.qp.X_add_number < REG_P || md.qp.X_add_number >= REG_P + 64) in ia64_unrecognized_line()
7741 if (md.qp.X_op == O_register) in ia64_unrecognized_line()
7823 md.last_text_seg = now_seg; in ia64_frob_label()
7831 if (md.path == md.maxpaths) in ia64_frob_label()
7833 md.maxpaths += 20; in ia64_frob_label()
7834 md.entry_labels = (const char **) in ia64_frob_label()
7835 xrealloc ((void *) md.entry_labels, in ia64_frob_label()
7836 md.maxpaths * sizeof (char *)); in ia64_frob_label()
7838 md.entry_labels[md.path++] = S_GET_NAME (sym); in ia64_frob_label()
7861 if (!md.keep_pending_output in ia64_flush_pending_output()
7912 l->X_op_symbol = md.indregsym[l->X_add_number - IND_CPUID]; in ia64_optimize_expr()
8027 sym = hash_find (md.reg_hash, name); in ia64_parse_name()
8035 cdesc = hash_find (md.const_hash, name); in ia64_parse_name()
8050 dr = &md.in; in ia64_parse_name()
8058 dr = &md.loc; in ia64_parse_name()
8066 dr = &md.out; in ia64_parse_name()
8102 if ((dr = hash_find (md.dynreg_hash, name))) in ia64_parse_name()
8345 if (md.mem_offset.hint) in specify_resource()
8347 if (md.debug_dv) in specify_resource()
8351 specs[count].index = (md.mem_offset.offset >> 3) & 0x3F; in specify_resource()
8353 specs[count].mem_offset.offset = md.mem_offset.offset; in specify_resource()
8354 specs[count++].mem_offset.base = md.mem_offset.base; in specify_resource()
9442 if (md.rot.num_regs > 0 in specify_resource()
9444 && num < 31 + md.rot.num_regs) in specify_resource()
9641 && qp_mutexes[i].path == md.path) in update_qp_mutex()
9650 if (md.debug_dv) in update_qp_mutex()
9662 if (qp_mutexes[i].path == md.path) in update_qp_mutex()
9709 if (md.debug_dv) in clear_qp_mutex()
9737 if (md.debug_dv) in clear_qp_implies()
9768 && qp_implies[i].path == md.path in add_qp_imply()
9780 if (md.debug_dv) in add_qp_imply()
9784 qp_implies[qp_implieslen].path = md.path; in add_qp_imply()
9825 if (md.debug_dv) in add_qp_mutex()
9831 qp_mutexes[qp_mutexeslen].path = md.path; in add_qp_mutex()
9850 if (md.debug_dv) in clear_register_values()
9919 for (i = 32; i < 32 + md.rot.num_regs; i++) in note_register_values()
9957 if (md.debug_dv) in note_register_values()
10018 gr_values[regno].path = md.path; in note_register_values()
10019 if (md.debug_dv) in note_register_values()
10042 gr_values[regno].path = md.path; in note_register_values()
10043 if (md.debug_dv) in note_register_values()
10127 if (md.debug_dv) in resources_match()
10134 if (md.debug_dv) in resources_match()
10168 if (insert_stop && md.num_slots_in_use > 0) in insn_group_break()
10171 if (md.debug_dv) in insn_group_break()
10256 if (md.debug_dv) in print_dependency()
10278 if (md.debug_dv) in instruction_serialization()
10289 if (md.debug_dv) in data_serialization()
10314 if (md.debug_dv) in remove_marked_resource()
10318 if (md.debug_dv) in remove_marked_resource()
10330 md.curr_slot = (md.curr_slot + 1) % NUM_SLOTS; in remove_marked_resource()
10331 if (++md.num_slots_in_use >= NUM_SLOTS) in remove_marked_resource()
10341 if (md.debug_dv) in remove_marked_resource()
10352 md.curr_slot = (md.curr_slot + 1) % NUM_SLOTS; in remove_marked_resource()
10353 if (++md.num_slots_in_use >= NUM_SLOTS) in remove_marked_resource()
10360 if (md.debug_dv) in remove_marked_resource()
10411 for (path = 0; path <= md.path; path++) in check_dependencies()
10459 md.entry_labels[path - 1]); in check_dependencies()
10472 if (md.explicit_mode) in check_dependencies()
10475 if (path < md.path) in check_dependencies()
10485 if (md.debug_dv) in check_dependencies()
10523 if (md.debug_dv) in mark_resources()
10544 count = specify_resource (dep, idesc, DV_REG, specs, note, md.path); in mark_resources()
10549 DEP (opdeps->regs[i]), md.path); in mark_resources()
10566 for (path = 0; path < md.path; path++) in mark_resources()
10600 md.path = 0; in update_dependencies()
10663 if (md.debug_dv) in check_dv()
10701 md.mem_offset.hint = 0; in check_dv()
10725 pdesc = (struct pseudo_opcode *) hash_find (md.pseudo_hash, mnemonic); in md_assemble()
10824 switch (md.hint_b) in md_assemble()
10838 if (md.qp.X_op == O_register) in md_assemble()
10840 qp_regno = md.qp.X_add_number - REG_P; in md_assemble()
10841 md.qp.X_op = O_absent; in md_assemble()
10904 if (md.detect_dv) in md_assemble()
10907 md.curr_slot = (md.curr_slot + 1) % NUM_SLOTS; in md_assemble()
10908 if (++md.num_slots_in_use >= NUM_SLOTS) in md_assemble()
10914 md.last_text_seg = now_seg; in md_assemble()
11084 && !(md.flags & EF_IA_64_ABI64)) in ia64_cons_fix_new()