; GNU Compiler Collection machine description for Intel 16-bit x86.
; Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
; Contributed by Rask Ingemann Lambertsen
;
; This file is part of GCC.
;
; GCC is free software; you can redistribute it and/or modify it under the
; terms of the GNU General Public License as published by the Free Software
; Foundation; either version 3 of the License, or (at your option) any
; later version.
;
; GCC is distributed in the hope that it will be useful, but WITHOUT ANY
; WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
; FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
; details.
;
; You should have received a copy of the GNU General Public License along
; with this program.  If not, see <http://www.gnu.org/licenses/>.

; A few special hard registers.  Keep this list in sync with ia16.h.
(define_constants
  [(C_REG		 0)
   (A_REG		 2)
   (AH_REG		 3)
   (D_REG		 4)
   (B_REG		 6)
   (SI_REG		 8)
   (DI_REG		 9)
   (BP_REG		10)
   (ES_REG		11)
   (SP_REG		12)
   (CC_REG		13)
   (LAST_HARD_REG	13)
   (ARGP_REG		14)
   (UNSPEC_NOT_CARRY	 0)
  ])

; Mode Iterators

; The modes directly supported on ia16 for most instructions.
(define_mode_iterator MO [HI QI])
; All modes which are no more than 16 bits wide.
(define_mode_iterator LE16 [HI QI V2QI])
; Modes which are exactly 16 bits wide.
(define_mode_iterator EQ16 [HI V2QI])
; Modes which are at least 16 bits wide.  Also complex ones.
(define_mode_iterator GE16 [HI SI DI TI SF DF SD DD TD V2QI SC DC CHI CSI CDI CTI])
; Modes which are more than 16 bits wide.
(define_mode_iterator GT16 [SI DI TI SF DF SD DD TD SC DC CHI CSI CDI CTI])
; Integer modes which are more than 16 bits wide.
(define_mode_iterator GT16I [SI DI TI])
; Complex modes except CQI, which needs special treatment.
(define_mode_iterator COMPLEX [SC DC CHI CSI CDI CTI])

; Inner mode of a complex value (concat ...).
; Mode attributes used for iterator substitution in the patterns below.

; Inner (component) mode of a complex mode, upper- and lower-case forms.
; The lower-case form also maps scalar modes to themselves for convenience.
(define_mode_attr INNER [(SC "SF") (DC "DF") (CQI "QI") (CHI "HI") (CSI "SI") (CDI "DI") (CTI "TI")])
(define_mode_attr inner [(SC "sf") (DC "df") (CQI "qi") (CHI "hi") (CSI "si") (CDI "di") (CTI "ti") (V2QI "v2qi") (HI "hi") (SI "si") (DI "di") (TI "ti") (SF "sf") (DF "df") (SD "sd") (DD "dd") (TD "td")])
; Instruction suffix.
(define_mode_attr s [(V2QI "w") (HI "w") (QI "b")])
; Instruction suffix for next longer mode (for cbtw/cwtd).
(define_mode_attr S [(HI "d") (QI "w")])
; Constraint for "general_operand".
(define_mode_attr g [(HI "g") (QI "qmi") (V2QI "g")])
; Constraint for "register_operand".
(define_mode_attr r [(HI "r") (QI "q") (V2QI "r")])
; The next longer mode.
(define_mode_attr LONG [(HI "SI") (QI "HI")])
(define_mode_attr long [(HI "si") (QI "hi")])
; The input/output registers of idiv/div instructions.
(define_mode_attr div_in [(HI "A") (QI "a")])
(define_mode_attr div_out [(HI "a") (QI "Ral")])
(define_mode_attr mod_out [(HI "d") (QI "Rah")])

; All CCmodes which have at least the zero (Z) flag, carry (C) flag,
; and so on for the other flags and combinations thereof.  The SET_*
; iterators are for instructions setting the flags, the USE_* iterators are
; for instructions using the flags.
; Notes: Modes with S but not O are not a subset of those with SO.
(define_mode_iterator USE_Z [CCZ CCCZ CCSZ CCSOZ CC])
(define_mode_iterator USE_C [CCC CCCZ CCSC CC])
(define_mode_iterator USE_CZ [CCCZ CCSCZ CC])
(define_mode_iterator USE_S [CCS CCSC CCSZ CCSCZ])
(define_mode_iterator USE_SO [CCSO CCSOZ CC])
(define_mode_iterator USE_SOZ [CCSOZ CC])
(define_mode_iterator SET_CZ [CCC CCZ CCCZ])
(define_mode_iterator SET_SOZ [CCZ CCSO CCSOZ])
(define_mode_iterator SET_SOCZ [CCC CCZ CCCZ CCSO CCSOZ CC])
; These are only permitted for comparisons against zero.
; CC-setting iterators valid only for comparisons against zero (see above).
(define_mode_iterator SET_SC [CCC CCS CCSC])
(define_mode_iterator SET_SZ [CCZ CCS CCSZ])
(define_mode_iterator SET_SCZ [CCC CCZ CCS CCCZ CCSC CCSZ CCSCZ])
(define_mode_iterator SET_CC [CCC CCZ CCS CCSO CCCZ CCSC CCSZ CCSOZ CCSCZ CC])

; Code Iterators

; Allow the same template to generate patterns for signed and unsigned mul/div.
; Idea from mips/mips.md.
(define_code_iterator any_extend [sign_extend zero_extend])
; Pattern name prefix.
(define_code_attr np [(sign_extend "") (zero_extend "u")])
; Instruction mnemonic prefix.
(define_code_attr mp [(sign_extend "i") (zero_extend "")])
; Collapse all of add/sub/and/ior/xor into one pattern.
(define_code_iterator any_arith3 [plus minus and ior xor])
(define_code_attr optab_arith3 [(plus "add") (minus "sub") (and "and") (ior "ior") (xor "xor")])
; "%" marks operand 1 commutative; minus is the only non-commutative code.
(define_code_attr constr1_arith3 [(plus "%") (minus "") (and "%") (ior "%") (xor "%")])
; PLUS and MINUS have special patterns.
(define_code_iterator any_logic3 [and ior xor])
(define_code_iterator any_nosub3 [plus and ior xor])
; Collapse add/sub with carry.
(define_code_iterator any_addsub [plus minus])
(define_code_attr optab_addsub [(plus "add") (minus "sub")])
(define_code_attr const_addsub [(plus "_const") (minus "")])
; Collapse all of ashl/ashr/lshr/rotl/rotr into one pattern.
(define_code_iterator any_shift [ashift ashiftrt lshiftrt rotate rotatert])
(define_code_iterator any_shiftcc [ashift ashiftrt lshiftrt])
(define_code_iterator any_lshift [ashift lshiftrt])
(define_code_attr optab_shift [(ashift "ashl") (ashiftrt "_ashr") (lshiftrt "lshr") (rotate "rotl") (rotatert "rotr")])
; Extra insn condition per shift code: arithmetic right shifts by the full
; width minus one (or more) are excluded.
(define_code_attr cond_shift [(ashift "1") (lshiftrt "1") (rotate "1") (rotatert "1") (ashiftrt "!CONST_INT_P (operands[2]) || INTVAL (operands[2]) < GET_MODE_BITSIZE (GET_MODE (operands[0])) - 1")])
; Collapse the shift by 8 instructions into one pattern.
(define_code_iterator any_shift8 [ashift lshiftrt])
(define_code_attr optab_shift8 [(ashift "ashl") (lshiftrt "lshr")])
; Collapse the whole vector shifts into one pattern.
(define_code_iterator any_vecshift3 [ashift lshiftrt])
(define_code_attr optab_vecshift3 [(ashift "shl") (lshiftrt "shr")])
; Collapse all the b<cond> branch patterns into one.  Likewise for the
; reversed-branch variants.
(define_code_iterator any_cond [eq ne gt gtu lt ltu ge geu le leu])
; Condition subsets grouped by which flags (Z/C/S/O) they need.
(define_code_iterator any_cond_z [eq ne])
(define_code_iterator any_cond_c [geu ltu])
(define_code_iterator any_cond_cz [gtu leu])
(define_code_iterator any_cond_so [ge lt])
(define_code_iterator any_cond_soz [gt le])
; These are the store-condition (scc) patterns available so far.  Must not
; include lt if operand 0 has HImode because of a bug in
; expr.c/convert_move().
(define_code_iterator scc_cond [eq ne gtu ltu geu leu le])
; Collapse patterns with ior and xor into one.
(define_code_iterator any_or [ior xor])
; Collapse patterns with plus, minus, ior and xor into one.  We can get a left
; shift by 8 or an and with 0xff00 for free with these.
(define_code_iterator any_cheaphi [plus minus ior xor])
; Define the instruction mnemonics for the instruction codes.
(define_code_attr mnemonic [(plus "add") (minus "sub") (and "and") (ior "or") (xor "xor") (ashift "shl") (ashiftrt "sar") (lshiftrt "shr") (rotate "rol") (rotatert "ror") (eq "je") (ne "jne") (gt "jg") (gtu "ja") (lt "jl") (ltu "jb") (ge "jge") (geu "jae") (le "jle") (leu "jbe")])
; Reversed branch mnemonics.
(define_code_attr mnemonic_rev [(eq "jne") (ne "je") (gt "jle") (gtu "jbe") (lt "jge") (ltu "jae") (ge "jl") (geu "jb") (le "jg") (leu "ja")])

(include "predicates.md")
(include "constraints.md")
; Peepholes and associated instructions.
(include "ia16-peepholes.md")

; Patterns for instructions which set the condition codes to something
; useful exist in three versions:
; 1) name: Describes the instruction, except that it clobbers the CC reg.
; 2) name_cc: Describes the instruction, including the effect on the CC reg.
; 3) name_cconly: Describes the effect on the CC reg, clobbers the result.
; (NOTE(review): a cross-reference was lost here; see the GCC internals
; manual for the reason.)
; See "13.9 Standard Pattern Names For Generation"

;; Copying values.

; Try to avoid mem->mem copies.  This may avoid reload problems.
(define_expand "mov<mode>"
  [(set (match_operand:LE16 0 "nonimmediate_operand")
	(match_operand:LE16 1 "general_operand"))]
  ""
{
  gcc_assert (!push_operand (operands[0], <MODE>mode));
  if (MEM_P (operands[0]) && MEM_P (operands[1]))
    operands[1] = force_reg (<MODE>mode, operands[1]);
})

; 16-bit wide moves (HI and V2QI).  At most one operand may be in memory.
(define_insn "*mov<mode>"
  [(set (match_operand:EQ16 0 "nonimmediate_operand" "=T,rm")
	(match_operand:EQ16 1 "general_operand" "rm,Ti"))]
  "!MEM_P (operands[0]) || !MEM_P (operands[1])"
  "movw\t%1,\t%0"
)

; 8-bit moves; "q" restricts to the byte-addressable registers.
(define_insn "*movqi"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=q,m")
	(match_operand:QI 1 "general_operand" "qmi,qi"))]
  "!MEM_P (operands[0]) || !MEM_P (operands[1])"
  "movb\t%1,\t%0"
)

; Write only the low byte of a register.  After reload, a plain register
; destination degrades to an ordinary move.
(define_insn_and_split "movstrictqi"
  [(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+q,m"))
	(match_operand:QI 1 "general_operand" "qmi,qi"))]
  ""
  "movb\t%1,\t%0"
  "reload_completed && !register_operand (operands[0], QImode)"
  [(set (match_dup 0) (match_dup 1))]
)

; The regular xor pattern has inputs and therefore causes
; REG_DEAD notes to be removed if used as output pattern of a peephole2
; definition.  It may also result in weird looking (but correct) instructions
; such as "xorw %sp, %di" after the rnreg pass.  This happens following a
; previous "movw %sp, %di" instruction.  Use this insn instead.
(define_insn "*mov<mode>_const0"
  [(set (match_operand:MO 0 "register_operand" "=<r>")
	(const_int 0))
   (clobber (reg:CC CC_REG))]
  ""
  "xor<s>\t%0,\t%0"
)

; We cannot use pre_dec:HI here because then note_stores() doesn't see that the
; stack pointer changes, which in turn breaks reload inheritance correctness.
; Push a 16-bit value.  The store and the SP adjustment are modelled as a
; parallel (instead of pre_dec) -- see the note above.
(define_expand "push<mode>1"
  [(parallel [(set (match_dup 1)
		   (match_operand:EQ16 0 "general_operand"))
	      (set (reg:HI SP_REG)
		   (plus:HI (reg:HI SP_REG) (const_int -2)))]
  )]
  ""
{
  /* We may be passed a constant complex value.  */
  if (!TARGET_PUSH_IMM
      && (CONSTANT_P (operands[0])
	  || (GET_CODE (operands[0]) == CONCAT
	      && (CONSTANT_P (XEXP (operands[0], 0))
		  || CONSTANT_P (XEXP (operands[0], 1))))))
    operands[0] = force_reg (<MODE>mode, operands[0]);
  operands[1] = gen_tmp_stack_mem (<MODE>mode,
				   plus_constant (stack_pointer_rtx, -2));
})

; This is for the prologue only.
(define_expand "_pushhi1_prologue"
  [(parallel [(set (match_dup 1)
		   (match_operand:HI 0 "general_operand"))
	      (set (reg:HI SP_REG)
		   (plus:HI (reg:HI SP_REG) (const_int -2)))]
  )]
  ""
{
  operands[1] = gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, -2));
})

; Push a wide value as a sequence of 16-bit pushes, highest word first.
; The template is a dummy.
(define_expand "push<mode>1"
  [(set (mem:GT16 (pre_dec:HI (reg:HI SP_REG)))
	(match_operand:GT16 0 "general_operand" ""))]
  ""
{
  unsigned int i = GET_MODE_SIZE (<MODE>mode);
  rtx tmp;

  do
    {
      i -= 2;
      tmp = simplify_gen_subreg (HImode, operands[0], <MODE>mode, i);
      emit_insn (gen_pushhi1 (tmp));
    }
  while (i > 0);
  DONE;
})

(define_insn "*push<mode>1_nonimm"
  [(set (mem:EQ16 (plus:HI (reg:HI SP_REG) (const_int -2)))
	(match_operand:EQ16 0 "nonimmediate_operand" "Tm"))
   (set (reg:HI SP_REG) (plus:HI (reg:HI SP_REG) (const_int -2)))]
  "!TARGET_PUSH_IMM"
  "pushw\t%0"
)

; With "push imm" available (186+), constants may be pushed directly.
(define_insn "*push<mode>1"
  [(set (mem:EQ16 (plus:HI (reg:HI SP_REG) (const_int -2)))
	(match_operand:EQ16 0 "general_operand" "Tmi"))
   (set (reg:HI SP_REG) (plus:HI (reg:HI SP_REG) (const_int -2)))]
  "TARGET_PUSH_IMM"
  "pushw\t%0"
)

; We can only push the lower QImode registers or memory, and they won't
; be sign extended in doing so.  This is unlike constants with *pushihi1.
; Push a byte: a full 16-bit slot is always reserved on the stack.
(define_expand "pushqi1"
  [(parallel [(set (match_dup 1)
		   (match_operand:QI 0 "general_operand"))
	      (set (reg:HI SP_REG)
		   (plus:HI (reg:HI SP_REG) (const_int -2)))]
  )]
  ""
{
  if (!TARGET_PUSH_IMM && CONSTANT_P (operands[0]))
    operands[0] = force_reg (QImode, operands[0]);
  operands[1] = gen_tmp_stack_mem (QImode,
				   plus_constant (stack_pointer_rtx, -2));
})

; Push the low byte of a wider value by pushing the containing word.
(define_insn "*pushqi1_subreg"
  [(set (mem:QI (plus:HI (reg:HI SP_REG) (const_int -2)))
	(subreg:QI (match_operand 0 "general_operand" "TmIpu") 0))
   (set (reg:HI SP_REG) (plus:HI (reg:HI SP_REG) (const_int -2)))]
  "TARGET_PUSH_IMM || !CONSTANT_P (operands[0])"
  "pushw\t%0"
)

; %X0 prints operand 0 as the word register containing the byte register.
(define_insn "*pushqi1"
  [(set (mem:QI (plus:HI (reg:HI SP_REG) (const_int -2)))
	(match_operand:QI 0 "general_operand" "lmIpu"))
   (set (reg:HI SP_REG) (plus:HI (reg:HI SP_REG) (const_int -2)))]
  "TARGET_PUSH_IMM || !CONSTANT_P (operands[0])"
  "pushw\t%X0"
)

; Pop a word from the stack; post_inc reflects the SP adjustment.
(define_insn "*pophi1"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=Tm")
	(mem:HI (post_inc:HI (reg:HI SP_REG))))]
  ""
  "popw\t%0"
)

; This is a hack to tell the register allocator to use AX_REGS instead of
; AL_REGS when operand 0 is a subreg of a HImode register rather than a
; register of QImode.
; xlat: al = *(bx + al).  The three variants below differ only in how the
; destination (%al) is expressed in RTL.
(define_insn "*xlatqi2_subreg"
  [(set (subreg:QI (match_operand:HI 0 "register_operand" "=a") 0)
	(mem:QI (plus:HI (zero_extend:HI
			   (match_operand:QI 1 "single_register_operand" "0"))
			 (match_operand:HI 2 "single_nonmemory_operand" "b"))))]
  ""
  "xlat\t(%2)"
)

(define_insn "*xlatqi2_strict_low_part"
  [(set (strict_low_part (subreg:QI (match_operand:HI 0 "register_operand" "+a") 0))
	(mem:QI (plus:HI (zero_extend:HI (subreg:QI (match_dup 0) 0))
			 (match_operand:HI 1 "single_nonmemory_operand" "b"))))]
  ""
  "xlat\t(%1)"
)

(define_insn "*xlatqi2"
  [(set (match_operand:QI 0 "single_register_operand" "=Ral")
	(mem:QI (plus:HI (zero_extend:HI
			   (match_operand:QI 1 "single_register_operand" "0"))
			 (match_operand:HI 2 "single_nonmemory_operand" "b"))))]
  ""
  "xlat\t(%2)"
)

; Split a generic "xlat-shaped" memory load into a move of the index byte
; into %al followed by an xlat.  The C code digs the index operand and bit
; offset out of the address, whatever of ZERO_EXTEND/ZERO_EXTRACT/AND form
; the combiner produced.
(define_insn_and_split "*xlatqi2_non_strict"
  [(set (match_operand:QI 0 "register_operand" "=Ral")
	(match_operand:QI 1 "xlat_memory_operand" "X"))]
  "!reload_completed"
  "#"
  "!reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (match_dup 3))]
{
  rtx zext = XEXP (XEXP (operands[1], 0), 0);
  rtx zext_op, bit_offset, new_addr;

  if (GET_CODE (zext) == SUBREG)
    zext = SUBREG_REG (zext);
  switch (GET_CODE (zext))
    {
    case ZERO_EXTEND:
      zext_op = XEXP (zext, 0);
      bit_offset = const0_rtx;
      break;
    case ZERO_EXTRACT:
      zext_op = XEXP (zext, 0);
      bit_offset = XEXP (zext, 2);
      break;
    case AND:
      if (GET_CODE (XEXP (zext, 0)) == SUBREG)
	{
	  zext_op = XEXP (XEXP (XEXP (zext, 0), 0), 0);
	  bit_offset = XEXP (XEXP (XEXP (zext, 0), 0), 1);
	}
      else
	{
	  zext_op = XEXP (XEXP (zext, 0), 0);
	  bit_offset = XEXP (XEXP (zext, 0), 1);
	}
      break;
    default:
      gcc_unreachable ();
    }
  /* For the benefit of the lower-subreg pass, create a HImode subreg if the
   * zero_extract operand is larger than HImode.
   */
  if (GET_MODE_SIZE (GET_MODE (zext_op)) > UNITS_PER_WORD)
    {
      unsigned int unit_offset;

      unit_offset = (INTVAL (bit_offset) / BITS_PER_WORD) * UNITS_PER_WORD;
      zext_op = simplify_gen_subreg (HImode, zext_op,
				     GET_MODE (zext_op), unit_offset);
      bit_offset = GEN_INT (INTVAL (bit_offset)
			    - unit_offset * BITS_PER_UNIT);
    }
  /* Use a SUBREG instead of a ZERO_EXTRACT with offset 0.  */
  if (INTVAL (bit_offset) == 0)
    operands[2] = simplify_gen_subreg (QImode, zext_op,
				       GET_MODE (zext_op), 0);
  else
    operands[2] = gen_rtx_ZERO_EXTRACT (QImode, zext_op, GEN_INT (8),
					bit_offset);
  /* Create the xlat memory operand.  */
  new_addr = gen_rtx_PLUS (Pmode, gen_rtx_ZERO_EXTEND (Pmode, operands[0]),
			   XEXP (XEXP (operands[1], 0), 1));
  operands[3] = replace_equiv_address_nv (operands[1], new_addr);
})

; Combine sometimes doesn't try to split off an outer ZERO_EXTEND, perhaps
; because the combined insn was made up of only two insns.  IMHO, this sucks.
; Same as above, plus a final "and $255" to perform the HImode zero extend.
(define_insn_and_split "*xlatqi2_non_strict_zero_extendqihi"
  [(set (match_operand:HI 0 "register_operand" "=Ral")
	(zero_extend:HI (match_operand:QI 1 "xlat_memory_operand" "X")))
   (clobber (reg:CC CC_REG))]
  "!reload_completed"
  "#"
  "!reload_completed"
  [(set (match_dup 3) (match_dup 2))
   (set (match_dup 3) (match_dup 4))
   (parallel [(set (match_dup 0) (and:HI (match_dup 0) (const_int 255)))
	      (clobber (reg:CC CC_REG))])]
{
  rtx zext = XEXP (XEXP (operands[1], 0), 0);
  rtx zext_op, bit_offset, new_addr, scratch;

  if (GET_CODE (zext) == SUBREG)
    zext = SUBREG_REG (zext);
  switch (GET_CODE (zext))
    {
    case ZERO_EXTEND:
      zext_op = XEXP (zext, 0);
      bit_offset = const0_rtx;
      break;
    case ZERO_EXTRACT:
      zext_op = XEXP (zext, 0);
      bit_offset = XEXP (zext, 2);
      break;
    case AND:
      if (GET_CODE (XEXP (zext, 0)) == SUBREG)
	{
	  zext_op = XEXP (XEXP (XEXP (zext, 0), 0), 0);
	  bit_offset = XEXP (XEXP (XEXP (zext, 0), 0), 1);
	}
      else
	{
	  zext_op = XEXP (XEXP (zext, 0), 0);
	  bit_offset = XEXP (XEXP (zext, 0), 1);
	}
      break;
    default:
      gcc_unreachable ();
    }
  /* For the benefit of the lower-subreg pass, create a HImode subreg if the
   * zero_extract operand is larger than HImode.  */
  if (GET_MODE_SIZE (GET_MODE (zext_op)) > UNITS_PER_WORD)
    {
      unsigned int unit_offset;

      unit_offset = (INTVAL (bit_offset) / BITS_PER_WORD) * UNITS_PER_WORD;
      zext_op = simplify_gen_subreg (HImode, zext_op,
				     GET_MODE (zext_op), unit_offset);
      bit_offset = GEN_INT (INTVAL (bit_offset)
			    - unit_offset * BITS_PER_UNIT);
    }
  /* Use a SUBREG instead of a ZERO_EXTRACT with offset 0.  */
  if (INTVAL (bit_offset) == 0)
    operands[2] = simplify_gen_subreg (QImode, zext_op,
				       GET_MODE (zext_op), 0);
  else
    operands[2] = gen_rtx_ZERO_EXTRACT (QImode, zext_op, GEN_INT (8),
					bit_offset);
  /* Create the xlat memory operand.  */
  scratch = simplify_gen_subreg (QImode, operands[0], HImode, 0);
  new_addr = gen_rtx_PLUS (Pmode, gen_rtx_ZERO_EXTEND (Pmode, scratch),
			   XEXP (XEXP (operands[1], 0), 1));
  operands[4] = replace_equiv_address_nv (operands[1], new_addr);
  /* Create the destination, which must be a QImode SUBREG.  */
  if (true || GET_CODE (scratch) == SUBREG)
    operands[3] = scratch;
  else
    operands[3] = gen_rtx_SUBREG (QImode, scratch, 0);
})

; Extract a byte-aligned byte by splitting to a plain byte move of the
; right subword.
(define_insn_and_split "*extzv_byte"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=q,m")
	(zero_extract:QI (match_operand 1 "nonimmediate_operand" "qm,q")
			 (const_int 8)
			 (match_operand 2 "const_int_operand" "n,n")))]
  "INTVAL (operands[2]) % BITS_PER_UNIT == 0
   && (!MEM_P (operands[0]) || !MEM_P (operands[1]))"
  "#"
  "reload_completed"
  [(set (match_dup 0) (match_dup 3))]
{
  operands[3] = simplify_gen_subreg (QImode, operands[1],
				     GET_MODE (operands[1]),
				     INTVAL (operands[2]) / BITS_PER_UNIT);
})

(define_insn_and_split "*extzv_byte_strict_low_part"
  [(set (strict_low_part (match_operand:QI 0 "register_operand" "+q"))
	(zero_extract:QI (match_operand 1 "nonimmediate_operand" "qm")
			 (const_int 8)
			 (match_operand 2 "const_int_operand" "n")))]
  "INTVAL (operands[2]) % BITS_PER_UNIT == 0
   && (!MEM_P (operands[0]) || !MEM_P (operands[1]))"
  "#"
  "reload_completed"
  [(set (match_dup 3) (match_dup 4))]
{
  operands[3] = simplify_gen_subreg (QImode, operands[0], QImode, 0);
  operands[4] = simplify_gen_subreg (QImode, operands[1],
				     GET_MODE (operands[1]),
				     INTVAL (operands[2]) / BITS_PER_UNIT);
})

; Register/memory exchange.
; NOTE(review): the pattern name and constraints below appear to have lost
; "<mode>"/"<r>"/"<s>" iterator markers during reformatting -- confirm
; against the original ia16.md.
(define_insn "*xchg2"
  [(set (match_operand:LE16 0 "register_operand" "+%")
	(match_operand:LE16 1 "nonimmediate_operand" "+m"))
   (set (match_dup 1) (match_dup 0))]
  "!MEM_P (operands[0]) || !MEM_P (operands[1])"
  "xchg\t%1,\t%0"
)

;; Arithmetic and logic operations

; Two operand QI/HI/V2QImode add/sub/and/ior/xor.
; Don't generate mem<-mem,mem or mem<-reg,reg insns.
; NOTE(review): the expander name and the first ia16_prepare_operands
; argument appear to have lost "<optab_arith3>"/"<mode>"/"<CODE>" iterator
; markers during reformatting -- confirm against the original ia16.md.
(define_expand "3"
  [(parallel [(set (match_operand:LE16 0 "nonimmediate_operand")
		   (any_arith3:LE16
		     (match_operand:LE16 1 "nonimmediate_operand")
		     (match_operand:LE16 2 "general_operand")))
	      (clobber (reg:CC CC_REG))])]
  ""
{
  rtx tmp;

  if ((tmp = ia16_prepare_operands (, operands)))
    {
      emit_insn (gen_3 (tmp, operands[1], operands[2]));
      emit_move_insn (operands[0], tmp);
      DONE;
    }
})

; Four operand HImode addition using lea, not clobbering/setting CC_REG.
(define_insn "*addhi4"
  [(set (match_operand:HI 0 "register_operand" "=r")
	(plus:HI (plus:HI (match_operand:HI 1 "single_register_operand" "%w")
			  (match_operand:HI 2 "single_register_operand" "x"))
		 (match_operand:HI 3 "immediate_operand" "i")))]
  ""
  "leaw\t%c3(%1,%2),\t%0"
)

; TODO: movw %sp, %bp
;       lea 66(%bp),%cx
; TODO: lea 4(%si), %si
; TODO:
;  64:	89 f5	mov %si,%bp
;  66:	8d 72 04	lea 4(%bp,%si),%si

; inc/dec reg/mem is often a little shorter and faster than add $1, reg/mem.
; Note that inc and dec don't set the carry flag.
; inc/dec setting S and Z flags (not C); the %al alternatives use add/sub
; with an immediate instead, which is equally short for the accumulator.
; NOTE(review): the pattern names and some mnemonics below appear to have
; lost "<mode>"/"<s>" iterator markers during reformatting -- confirm
; against the original ia16.md.
(define_insn "*inc_dec2_cc_"
  [(set (reg:SET_SZ CC_REG)
	(compare:SET_SZ
	  (plus:MO (match_operand:MO 1 "nonimmediate_operand" "0,0,0,0")
		   (match_operand:MO 2 "const_int_operand" "M1,P1,M1,P1"))
	  (const_int 0)))
   (set (match_operand:MO 0 "nonimmediate_operand" "=#*Ral,#*Ral,m,m")
	(plus:MO (match_dup 1) (match_dup 2)))]
  "(INTVAL (operands[2]) == 1 || INTVAL (operands[2]) == -1)
   && ((!MEM_P (operands[0]) && !MEM_P (operands[1]))
       || rtx_equal_p (operands[0], operands[1]))"
  "@
   subb\t$1,\t%0
   addb\t$1,\t%0
   dec\t%0
   inc\t%0"
)

; Flags-only variant: the arithmetic result is discarded into a scratch.
(define_insn "*inc_dec2_cconly_"
  [(set (reg:SET_SZ CC_REG)
	(compare:SET_SZ
	  (plus:MO (match_operand:MO 1 "nonimmediate_operand" "0,0,0,0")
		   (match_operand:MO 2 "const_int_operand" "M1,P1,M1,P1"))
	  (const_int 0)))
   (clobber (match_scratch:MO 0 "=#*Ral,#*Ral,,"))]
  "(INTVAL (operands[2]) == 1 || INTVAL (operands[2]) == -1)
   && ((!MEM_P (operands[0]) && !MEM_P (operands[1]))
       || rtx_equal_p (operands[0], operands[1]))"
  "@
   subb\t$1,\t%0
   addb\t$1,\t%0
   dec\t%0
   inc\t%0"
)

; This works for loops like "for (i = n; i >= 0; i --)": sub $1 compares
; the old value against zero (via C and Z) while decrementing.
(define_insn "*cmp2_const0_dec2_ccz_c"
  [(set (reg:CCZ_C CC_REG)
	(compare:CCZ_C (match_operand:MO 1 "nonimmediate_operand" "0")
		       (const_int 0)))
   (set (match_operand:MO 0 "nonimmediate_operand" "=rm")
	(plus:MO (match_dup 1) (const_int -1)))]
  "(!MEM_P (operands[0]) && !MEM_P (operands[1]))
   || rtx_equal_p (operands[0], operands[1])"
  "sub\t$1,\t%0"
)

; Any insn which implements HImode PLUS and may clobber CC_REG.
; w = %bx/%bp, x = %si/%di, B = %bx/%si/%di/%bp, P1 = 1, M1 = -1.
; TODO: 2*inc/dec reg16 is shorter than add/sub $2, reg16.
; There is no point in using "lea" if reloading is needed, thus "!".
; HImode addition.  Alternatives cover inc/dec, byte-wise tricks for
; constants with a zero low byte (presumably what Um/Uo match -- TODO
; confirm against constraints.md), plain addw, and lea forms.
(define_insn "*addhi3"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,rm,qm,qm,r,m,!r,!r")
	(plus:HI (match_operand:HI 1 "nonimmediate_operand" "%0,0,0,0,0,0,*w,*B")
		 (match_operand:HI 2 "general_operand" "P1,M1,Um,Uo,g,ri,*x,i")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (PLUS, operands)"
  "@
   incw\t%0
   decw\t%0
   decb\t%H0
   addb\t%H2,\t%H0
   addw\t%2,\t%0
   addw\t%2,\t%0
   leaw\t(%1,%2),\t%0
   leaw\t%c2(%1),\t%0"
)

; Three operand HImode addition using lea, not clobbering/setting CC_REG.
; This is for use by reload when computing stack slot addresses.
; TODO: There should be an alternative for %sp+const or reload will use an
; insn sequence which clobbers CC_REG, which could be a problem in rare cases.
(define_insn "*addhi3_lea"
  [(set (match_operand:HI 0 "register_operand" "=r,r,r")
	(plus:HI (match_operand:HI 1 "single_register_operand" "w,x,B")
		 (match_operand:HI 2 "single_nonmemory_operand" "x,w,i")))]
  "reload_in_progress || reload_completed"
  "@
   leaw\t(%1,%2),\t%0
   leaw\t(%2,%1),\t%0
   leaw\t%c2(%1),\t%0"
)

; Any insn which implements QImode PLUS and may clobber CC_REG.
; P1 = 1, M1 = -1, A = %al.
(define_insn "*addqi3"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=#*Ral,#*Ral,qm,qm,q,m")
	(plus:QI (match_operand:QI 1 "nonimmediate_operand" "%0,0,0,0,0,0")
		 (match_operand:QI 2 "general_operand" "P1,M1,P1,M1,qmi,qi")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (PLUS, operands)"
  "@
   addb\t%2,\t%0
   addb\t%2,\t%0
   incb\t%0
   decb\t%0
   addb\t%2,\t%0
   addb\t%2,\t%0"
)

; Widening byte subtraction: the sbb of the high byte against itself (or
; against a zeroed high byte for memory) materializes the borrow.
; TODO: Split this.
(define_insn "*subqi3_zero_extendqihi"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=q,m")
	(minus:HI (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "0,0"))
		  (zero_extend:HI (match_operand:QI 2 "general_operand" "qmi,qi"))))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (MINUS, operands)"
  "@
   subb\t%2,\t%L0\;sbbb\t%H0,\t%H0
   movb\t$0,\t%H0\;subb\t%2,\t%L0\;sbbb\t$0,\t%H0"
)

; This defines MINUS for QImode and HImode.
; Subtraction: "neg" when operand 1 is the constant zero (Z constraint),
; otherwise a plain sub.
; NOTE(review): the pattern names, some constraints and mnemonics below
; appear to have lost "<mode>"/"<r>"/"<g>"/"<s>" iterator markers during
; reformatting -- confirm against the original ia16.md.
(define_insn "*sub3"
  [(set (match_operand:MO 0 "nonimmediate_operand" "=m,,m")
	(minus:MO (match_operand:MO 1 "nonimmediate_or_0_operand" "Z,0,0")
		  (match_operand:MO 2 "general_operand" "0,,i")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (MINUS, operands)"
  "@
   neg\t%0
   sub\t%2,\t%0
   sub\t%2,\t%0"
)

; Same, but also describing the resulting flags (compare against zero).
(define_insn "*sub3_cc_"
  [(set (reg:SET_SCZ CC_REG)
	(compare:SET_SCZ
	  (minus:MO (match_operand:MO 1 "nonimmediate_or_0_operand" "Z,0,0")
		    (match_operand:MO 2 "general_operand" "0,,i"))
	  (const_int 0)))
   (set (match_operand:MO 0 "nonimmediate_operand" "=m,,m")
	(minus:MO (match_dup 1) (match_dup 2)))]
  "ia16_arith_operands_p (MINUS, operands)"
  "@
   neg\t%0
   sub\t%2,\t%0
   sub\t%2,\t%0"
)

; FIXME: Should we have this at all?  It should be expressed as comparison
; of the two operands directly.
(define_insn "*sub3_cconly_"
  [(set (reg:SET_SCZ CC_REG)
	(compare:SET_SCZ
	  (minus:MO (match_operand:MO 1 "nonimmediate_or_0_operand" "Z,0,0")
		    (match_operand:MO 2 "nonimmediate_operand" "0,,i"))
	  (const_int 0)))
   (clobber (match_scratch:MO 0 "=,,"))]
  "ia16_arith_operands_p (MINUS, operands)"
  "@
   neg\t%0
   sub\t%2,\t%0
   sub\t%2,\t%0"
)

; This defines insns for the AND code, modes HI and V2QI.
; andw $0xff7f, reg/mem	is bad, 16-bit constant (can't sign extend).
; andb $0x7f, reg/mem	is good, 8-bit constant.
; andb $0x80, reg	is bad, needs QI_REGS register.
; andw $0xff80, reg	is good, accepts GENERAL_REGS register.
; andw $0xff80, mem	is bad, 16-bit memory access to modify lower 8 bits.
; andb $0x80, mem	is good, only 8-bit memory access.
; AND for HI/V2QI.  The byte-wise alternatives exploit masks whose high or
; low byte is all-ones/all-zeros (the U*/L* constraints).
; NOTE(review): the pattern names below appear to have lost "<mode>" (and,
; for *qi3, "<optab_arith3>"/"<mnemonic>") iterator markers during
; reformatting -- confirm against the original ia16.md.
(define_insn "*and3"
  [(set (match_operand:EQ16 0 "nonimmediate_operand" "=q,m,q,m,r,m,qm,r,m")
	(and:EQ16 (match_operand:EQ16 1 "nonimmediate_operand" "%0,0,0,0,0,0,0,0,0")
		  (match_operand:EQ16 2 "general_operand" "Um,Um,Lbm,Lbm,Lar,Lam,Ua,g,ri")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (AND, operands)"
  "@
   xorb\t%L0,\t%L0
   movb\t$0,\t%L0
   xorb\t%H0,\t%H0
   movb\t$0,\t%H0
   andw\t%2,\t%0
   andb\t%L2,\t%L0
   andb\t%H2,\t%H0
   andw\t%2,\t%0
   andw\t%2,\t%0"
)

; This defines insns for the IOR code, modes HI and V2QI.
(define_insn "*ior3"
  [(set (match_operand:EQ16 0 "nonimmediate_operand" "=qm,qm,q,m,qm,r,m")
	(ior:EQ16 (match_operand:EQ16 1 "nonimmediate_operand" "%0,0,0,0,0,0,0")
		  (match_operand:EQ16 2 "general_operand" "Lbm,Um,Lor,Lom,Uo,g,ri")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (IOR, operands)"
  "@
   movb\t$255,\t%L0
   movb\t$255,\t%H0
   orb\t%L2,\t%L0
   orb\t%L2,\t%L0
   orb\t%H2,\t%H0
   orw\t%2,\t%0
   orw\t%2,\t%0"
)

; This defines insns for the XOR code, modes HI and V2QI.
(define_insn "*xor3"
  [(set (match_operand:EQ16 0 "nonimmediate_operand" "=qm,qm,q,m,qm,r,m")
	(xor:EQ16 (match_operand:EQ16 1 "nonimmediate_operand" "%0,0,0,0,0,0,0")
		  (match_operand:EQ16 2 "general_operand" "Lbm,Um,Lor,Lom,Uo,g,ri")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (XOR, operands)"
  "@
   notb\t%L0
   notb\t%H0
   xorb\t%L2,\t%L0
   xorb\t%L2,\t%L0
   xorb\t%H2,\t%H0
   xorw\t%2,\t%0
   xorw\t%2,\t%0"
)

; This defines insns for the AND, IOR and XOR codes, mode QI.
; This used to have % for operand 1 but reload died compiling something.
(define_insn "*qi3"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=q,m,q,m")
	(any_logic3:QI (match_operand:QI 1 "nonimmediate_operand" "0,0,qmi,qi")
		       (match_operand:QI 2 "general_operand" "qmi,qi,0,0")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (, operands)"
  "@
   b\t%2,\t%0
   b\t%2,\t%0
   b\t%1,\t%0
   b\t%1,\t%0"
)

; Define insns for PLUS, AND, IOR and XOR codes combined with a comparison
; for HImode and QImode.
; Arithmetic/logic op that also produces useful flags (compare against 0).
; NOTE(review): the pattern names, constraints and output templates below
; appear to have lost "<optab_arith3>"/"<mode>"/"<r>"/"<g>"/"<CODE>"/
; "<mnemonic>"/"<s>" iterator markers during reformatting -- confirm
; against the original ia16.md.
(define_insn "*3_cc_"
  [(set (reg:SET_SCZ CC_REG)
	(compare:SET_SCZ
	  (any_nosub3:MO (match_operand:MO 1 "nonimmediate_operand" "%0,0")
			 (match_operand:MO 2 "general_operand" ",i"))
	  (const_int 0)))
   (set (match_operand:MO 0 "nonimmediate_operand" "=,m")
	(any_nosub3:MO (match_dup 1) (match_dup 2)))]
  "ia16_arith_operands_p (, operands)"
  "\t%2,\t%0"
)

; A special case of AND + COMPARE which doesn't need a scratch register:
; test only sets the flags.
(define_insn "*and3_cconly__noclobber"
  [(set (reg:SET_CC CC_REG)
	(compare:SET_CC
	  (and:MO (match_operand:MO 0 "nonimmediate_operand" "%qm,qm,,m")
		  (match_operand:MO 1 "general_operand" "UmUo,LbmLom,,i"))
	  (const_int 0)))]
  "!MEM_P (operands[0]) || !MEM_P (operands[1])"
  "@
   testb\t%H1,\t%H0
   testb\t%L1,\t%L0
   test\t%1,\t%0
   test\t%1,\t%0"
)

; Flags-only form of the op; the result goes into a scratch.
(define_insn "*3_cconly_"
  [(set (reg:SET_SCZ CC_REG)
	(compare:SET_SCZ
	  (any_nosub3:MO (match_operand:MO 1 "nonimmediate_operand" "%0")
			 (match_operand:MO 2 "general_operand" ""))
	  (const_int 0)))
   (clobber (match_scratch:MO 0 "="))]
  "ia16_arith_operands_p (, operands)"
  "\t%2,\t%0"
)

; Combine creates such insns.  Get rid of the clobber and the scratch
; register: (x + c) <=> 0 is the same as x <=> -c, except for c == -32768
; where -c is not representable.
(define_split
  [(set (reg:SET_SCZ CC_REG)
	(compare:SET_SCZ
	  (plus:MO (match_operand:MO 0 "nonimmediate_operand")
		   (match_operand:MO 1 "const_int_operand"))
	  (const_int 0)))
   (clobber (scratch:MO))]
  "INTVAL (operands[1]) != -32768"
  [(set (reg:CC CC_REG) (compare:CC (match_dup 0) (match_dup 2)))]
{
  operands[2] = GEN_INT (-INTVAL (operands[1]));
})

; Shifts and zero extends combined with bitwise logic or arithmetic.
; Mostly from newlib/libc/stdlib/ldtoa.c.
; or/xor of (x >> 8) into y: a single byte op on the right sub-bytes.
; NOTE(review): the pattern names and output templates below appear to have
; lost "<optab>"/"<mnemonic>" iterator markers during reformatting --
; confirm against the original ia16.md.
(define_insn "*lshrhi_const8_hi3"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=q,qm")
	(any_or:HI (lshiftrt:HI (match_operand:HI 2 "nonimmediate_operand" "qm,q")
				(const_int 8))
		   (match_operand:HI 1 "nonimmediate_operand" "0,0")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (, operands)"
  "b\t%H2,\t%L0"
)

; or/xor of a zero-extended byte into y: a byte op on the low byte.
(define_insn "*zero_extendqihi_hi3"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=q,qm")
	(any_or:HI (zero_extend:HI (match_operand:QI 2 "nonimmediate_operand" "qm,q"))
		   (match_operand:HI 1 "nonimmediate_operand" "0,0")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (, operands)"
  "b\t%2,\t%L0"
)

; op of (x << 8) into y: a byte op on the high byte.
; TODO: These two set all flags except Z correctly.
(define_insn "*ashlhi_const8_hi3"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=q,qm")
	(any_cheaphi:HI (ashift:HI (match_operand:HI 2 "nonimmediate_operand" "qm,q")
				   (const_int 8))
			(match_operand:HI 1 "nonimmediate_operand" "0,0")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (, operands)"
  "b\t%L2,\t%H0"
)

; Merge two bytes into a word with a single byte move to the high half.
; From testsuite/gcc.dg/tree-ssa/gen-vect-25.c
(define_insn "*vector_mergehi3"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=q,qm")
	(ior:HI (ashift:HI (match_operand:HI 2 "general_operand" "qm,q")
			   (const_int 8))
		(zero_extend:HI (match_operand:QI 1 "general_operand" "0,0"))))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (IOR, operands)"
  "movb\t%L2,\t%H0"
)

; op of (x & 0xff00) into y: a byte op on the high byte.
(define_insn "*andhi_constm256_hi3"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=q,qm")
	(any_cheaphi:HI (and:HI (match_operand:HI 2 "nonimmediate_operand" "qm,q")
				(const_int -256))
			(match_operand:HI 1 "nonimmediate_operand" "0,0")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (, operands)"
  "b\t%H2, %H0"
)

; TODO: This is from umoddi3, try to figure out what is missing for combine.
;  a99:	03 86 7a ff	add -134(%bp),%ax
;  a9d:	13 96 7c ff	adc -132(%bp),%dx
;  aa1:	39 96 7c ff	cmp %dx,-132(%bp)

; Insn patterns used in multiword addition.
; Low word of a multiword add, producing the carry in CCCmode.  Adding the
; constant zero (Z) only needs to clear the carry flag (clc).
; NOTE(review): the pattern names, constraints and mnemonics below appear
; to have lost "<mode>"/"<r>"/"<g>"/"<s>" iterator markers during
; reformatting -- confirm against the original ia16.md.
(define_insn "_add3_cc_for_carry"
  [(set (reg:CCC CC_REG)
	(compare:CCC
	  (plus:MO (match_operand:MO 1 "nonimmediate_operand" "%0,0,0")
		   (match_operand:MO 2 "general_operand" "Z,,i"))
	  (match_dup 1)))
   (set (match_operand:MO 0 "nonimmediate_operand" "=m,,m")
	(plus:MO (match_dup 1) (match_dup 2)))]
  "ia16_arith_operands_p (PLUS, operands)"
  "@
   clc
   add\t%2,\t%0
   add\t%2,\t%0"
)

; Carry-only variant; the sum goes into a scratch.
(define_insn "*add3_cconly_for_carry"
  [(set (reg:CCC CC_REG)
	(compare:CCC
	  (plus:MO (match_operand:MO 1 "nonimmediate_operand" "%0,0")
		   (match_operand:MO 2 "general_operand" "Z,"))
	  (match_dup 1)))
   (clobber (match_scratch:MO 0 "=X,"))]
  "ia16_arith_operands_p (PLUS, operands)"
  "@
   clc
   add\t%2,\t%0"
)

; Higher word of a multiword add: adc consumes the carry.  The carry is
; modelled as subtracting the carry_flag_operator (which is -C) -- i.e.
; x - (-C) + y.
(define_insn "_add3_carry"
  [(set (match_operand:MO 0 "nonimmediate_operand" "=,m")
	(plus:MO (minus:MO (match_operand:MO 1 "nonimmediate_operand" "%0,0")
			   (match_operand:MO 3 "carry_flag_operator"))
		 (match_operand:MO 2 "general_operand" ",i")))
   (clobber (reg:CC CC_REG))]
  "ia16_arith_operands_p (PLUS, operands)"
  "adc\t%2,\t%0"
)

; This helps combine create more complex patterns.
; x - carry with no other addend: rewrite as the canonical adc form with a
; zero addend so the _add..._carry pattern above matches.
; NOTE(review): the pattern names, constraints and the bare "mode"
; occurrences in the C code below appear to have lost "<mode>"/"<MODE>"/
; "<r>"/"<g>" iterator markers during reformatting -- confirm against the
; original ia16.md.
(define_insn_and_split "*inccc3_carry"
  [(set (match_operand:MO 0 "nonimmediate_operand" "=,m")
	(minus:MO (match_operand:MO 1 "nonimmediate_operand" "%0,0")
		  (match_operand:MO 2 "carry_flag_operator")))
   (clobber (reg:CC CC_REG))]
  "!MEM_P (operands[0]) || !MEM_P (operands[1])
   || can_create_pseudo_p () || rtx_equal_p (operands[0], operands[1])"
  "#"
  "&& 1"
  [(parallel [(set (match_dup 0)
		   (plus:MO (minus:MO (match_dup 1) (match_dup 2))
			    (const_int 0)))
	      (clobber (reg:CC CC_REG))]
  )]
{
  if (memory_operand (operands[1], mode)
      && !rtx_equal_p (operands[0], operands[1]))
    operands[1] = force_reg (mode, operands[1]);
})

; Middle word of a multiword add: adc that both consumes and produces the
; carry.
(define_insn "_add3_carry_cc_for_carry"
  [(set (reg:CCC CC_REG)
	(compare:CCC
	  (plus:MO (minus:MO (match_operand:MO 1 "nonimmediate_operand" "%0,0")
			     (match_operand:MO 3 "carry_flag_operator"))
		   (match_operand:MO 2 "general_operand" ",i"))
	  (match_dup 2)))
   (set (match_operand:MO 0 "nonimmediate_operand" "=,m")
	(plus:MO (minus:MO (match_dup 1) (match_dup 3))
		 (match_dup 2)))]
  "ia16_arith_operands_p (PLUS, operands)"
  "adc\t%2,\t%0"
)

; Insn patterns used in multiword subtraction.
; Low-word subtraction for multiword arithmetic: compute op1-op2, store it,
; and record the borrow in CCCZ_NC mode. Special alternatives: subtracting 0
; is just "clc", and 0-op2 becomes "neg". A CC-only twin discards the
; difference, and the third pattern consumes the borrow with "sbb".
; NOTE(review): "<...>" substitutions look stripped by extraction (empty
; alternatives in "Z,0,,i" / "=m,m,,m") -- restore from the original ia16.md.
(define_insn "_sub3_cc_for_carry" [(set (reg:CCCZ_NC CC_REG) (compare:CCCZ_NC (minus:MO (match_operand:MO 1 "nonimmediate_or_0_operand" "0,Z,0,0") (match_operand:MO 2 "general_operand" "Z,0,,i")) (match_dup 1))) (set (match_operand:MO 0 "nonimmediate_operand" "=m,m,,m") (minus:MO (match_dup 1) (match_dup 2)))] "ia16_arith_operands_p (MINUS, operands)" "@ clc neg\t%0 sub\t%2,\t%0 sub\t%2,\t%0" ) (define_insn "*sub3_cconly_for_carry" [(set (reg:CCCZ_NC CC_REG) (compare:CCCZ_NC (minus:MO (match_operand:MO 1 "nonimmediate_or_0_operand" "0,Z,0") (match_operand:MO 2 "general_operand" "Z,0,")) (match_dup 1))) (clobber (match_scratch:MO 0 "=X,,"))] "ia16_arith_operands_p (MINUS, operands)" "@ clc neg\t%0 sub\t%2,\t%0" ) (define_insn "_sub3_carry" [(set (match_operand:MO 0 "nonimmediate_operand" "=,m") (minus:MO (plus:MO (match_operand:MO 3 "carry_flag_operator") (match_operand:MO 1 "nonimmediate_operand" "0,0")) (match_operand:MO 2 "general_operand" ",i"))) (clobber (reg:CC CC_REG))] "ia16_arith_operands_p (MINUS, operands)" "sbb\t%2,\t%0" ) ; Splitter to help combine create more complex patterns. 
; "*deccc_carry" canonicalizes carry+op1 into the sbb form (minus 0);
; "_sub3_carry_cc_for_carry" is the middle-word sbb that also records the
; outgoing borrow. Then the multiword add/sub machinery: the expander may
; route through ia16_prepare_operands()/a temporary, and the splitter chops a
; GT16I operation into an lsw cc-setting insn, carry-propagating middle
; words, and a final msw insn (its C code spans the next source line).
; NOTE(review): "<...>" iterator/code tags stripped by extraction -- names
; like "3" and C tests like "( == PLUS)" are missing their substitutions
; (presumably <plusminus_insn>/<CODE>); restore from the original ia16.md.
(define_insn_and_split "*deccc_carry" [(set (match_operand:MO 0 "nonimmediate_operand" "=m") (plus:MO (match_operand:MO 2 "carry_flag_operator") (match_operand:MO 1 "nonimmediate_operand" "0"))) (clobber (reg:CC CC_REG))] "!MEM_P (operands[0]) || !MEM_P (operands[1])" "#" "" [(parallel [(set (match_dup 0) (minus:MO (plus:MO (match_dup 2) (match_dup 1)) (const_int 0))) (clobber (reg:CC CC_REG))] )] ) (define_insn "_sub3_carry_cc_for_carry" [(set (reg:CCCZ_NC CC_REG) (compare:CCCZ_NC (minus:MO (plus:MO (match_operand:MO 3 "carry_flag_operator") (match_operand:MO 1 "nonimmediate_operand" "0,0")) (match_operand:MO 2 "general_operand" ",i")) (plus:MO (match_dup 3) (match_dup 1)))) (set (match_operand:MO 0 "nonimmediate_operand" "=,m") (minus:MO (plus:MO (match_dup 3) (match_dup 1)) (match_dup 2)))] "ia16_arith_operands_p (MINUS, operands)" "sbb\t%2,\t%0" ) ; Multiword addition and subtraction. ; This is not optimum for some constants (e.g. 0x80000000), but still ; lots better than GCC's generic attempt to synthesize addsi3. (define_expand "3" [(parallel [(set (match_operand:GT16I 0 "nonimmediate_operand") (any_addsub:GT16I (match_operand:GT16I 1 "general_operand") (match_operand:GT16I 2 "general_operand"))) (clobber (reg:CC CC_REG))] )] "" { rtx tmp; if ( == MINUS && CONSTANT_P (operands[1])) operands[1] = force_reg (mode, operands[1]); if ((tmp = ia16_prepare_operands (, operands))) { emit_insn (gen_3 (tmp, operands[1], operands[2])); emit_move_insn (operands[0], tmp); DONE; } }) (define_insn_and_split "*3" [(set (match_operand:GT16I 0 "nonimmediate_operand" "=r,m") (any_addsub:GT16I (match_operand:GT16I 1 "nonimmediate_operand" "0,0") (match_operand:GT16I 2 "general_operand" "g,ri"))) (clobber (reg:CC CC_REG))] "ia16_arith_operands_p (, operands)" "#" "" [(const_int 0)] { /* Avoid a warning: GET_MODE_SIZE(mode) is signed. */ signed int i; rtx op0, op1, op2; enum machine_mode cc_mode = ( == PLUS) ? CCCmode : CCCZ_NCmode; enum rtx_code overflow_code = ( == PLUS) ? 
LTU : GTU; rtx cc_reg = gen_rtx_REG (cc_mode, CC_REG); rtx cc_op = gen_rtx_fmt_ee (overflow_code, HImode, cc_reg, const0_rtx); /* The least significant word (LSW). */ op0 = simplify_gen_subreg (HImode, operands[0], mode, 0); op1 = simplify_gen_subreg (HImode, operands[1], mode, 0); op2 = simplify_gen_subreg (HImode, operands[2], mode, 0); emit_insn (gen__hi3_cc_for_carry (op0, op1, op2)); /* The words between the LSW and MSW. */ for (i = UNITS_PER_WORD; i < GET_MODE_SIZE (mode) - UNITS_PER_WORD; i += UNITS_PER_WORD) { op0 = simplify_gen_subreg (HImode, operands[0], mode, i); op1 = simplify_gen_subreg (HImode, operands[1], mode, i); op2 = simplify_gen_subreg (HImode, operands[2], mode, i); emit_insn (gen__hi3_carry_cc_for_carry (op0, op1, op2, cc_op)); } /* The most significant word (MSW). */ op0 = simplify_gen_subreg (HImode, operands[0], mode, i); op1 = simplify_gen_subreg (HImode, operands[1], mode, i); op2 = simplify_gen_subreg (HImode, operands[2], mode, i); emit_insn (gen__hi3_carry (op0, op1, op2, cc_op)); DONE; }) ; Vector addition and subtraction. 
; V2QI vector add/sub: after reload, each is split into two independent
; QImode operations on the low (subreg 0) and high (subreg 1) bytes. The
; subtraction splitter's C code distinguishes the "negate" alternative
; (op1 == const 0, op2 tied to op0) from the ordinary op0-tied-to-op1 case
; when wiring up the per-byte operands.
; NOTE(review): likely unaffected by the stripped "<...>" tags elsewhere in
; this extraction, but verify against the original ia16.md anyway.
(define_insn_and_split "*addv2qi3" [(set (match_operand:V2QI 0 "nonimmediate_operand" "=qm") (plus:V2QI (match_operand:V2QI 1 "nonimmediate_operand" "%0") (match_operand:V2QI 2 "general_operand" "qmi"))) (clobber (reg:CC CC_REG))] "ia16_arith_operands_p (PLUS, operands)" "#" "reload_completed" [(parallel [ (set (match_dup 3) (plus:QI (match_dup 3) (match_dup 4))) (clobber (reg:CC CC_REG)) ]) (parallel [ (set (match_dup 5) (plus:QI (match_dup 5) (match_dup 6))) (clobber (reg:CC CC_REG)) ])] { operands[3] = simplify_gen_subreg (QImode, operands[0], V2QImode, 0); operands[4] = simplify_gen_subreg (QImode, operands[2], V2QImode, 0); operands[5] = simplify_gen_subreg (QImode, operands[0], V2QImode, 1); operands[6] = simplify_gen_subreg (QImode, operands[2], V2QImode, 1); }) (define_insn_and_split "*subv2qi3" [(set (match_operand:V2QI 0 "nonimmediate_operand" "=qm,qm") (minus:V2QI (match_operand:V2QI 1 "nonimmediate_or_0_operand" "Z,0") (match_operand:V2QI 2 "general_operand" "0,qmi"))) (clobber (reg:CC CC_REG))] "ia16_arith_operands_p (MINUS, operands)" "#" "reload_completed" [(parallel [ (set (match_dup 3) (minus:QI (match_dup 4) (match_dup 5))) (clobber (reg:CC CC_REG)) ]) (parallel [ (set (match_dup 6) (minus:QI (match_dup 7) (match_dup 8))) (clobber (reg:CC CC_REG)) ])] { operands[3] = simplify_gen_subreg (QImode, operands[0], V2QImode, 0); operands[6] = simplify_gen_subreg (QImode, operands[0], V2QImode, 1); /* We have to check which operands are matched and which ones are not. */ if (operands[1] == CONST0_RTX (V2QImode)) { operands[4] = simplify_gen_subreg (QImode, operands[1], V2QImode, 0); operands[5] = operands[3]; operands[7] = simplify_gen_subreg (QImode, operands[1], V2QImode, 1); operands[8] = operands[6]; } else { operands[4] = operands[3]; operands[5] = simplify_gen_subreg (QImode, operands[2], V2QImode, 0); operands[7] = operands[6]; operands[8] = simplify_gen_subreg (QImode, operands[2], V2QImode, 1); } }) ; The template is a dummy. 
; Vector negate (routed through subv2qi3 with a zero first operand), whole-
; vector shifts done as a single HImode shift on the subreg, the "aad imm"
; multiply-add patterns (al = imm*ah + al; imm fixed at 10 on NEC V20/V30),
; and the QI/HI multiply patterns. mulhi3 expands through fresh pseudos to
; relax *mulhi3's tight %ax/%dx constraints.
; NOTE(review): "<...>" iterator/attr tags stripped by extraction -- e.g.
; "vec__v2qi", "gen_hi3", "3" and the empty "( == ...)" tests are missing
; their substitutions; restore from the original ia16.md before building.
(define_expand "negv2qi2" [(set (match_operand:V2QI 0 "nonimmediate_operand") (minus:V2QI (const_vector:V2QI [(const_int 0) (const_int 0)]) (match_operand:V2QI 1 "general_operand")))] "" { emit_insn (gen_subv2qi3 (operands[0], CONST0_RTX (V2QImode), operands[1])); DONE; }) ; Whole vector shifts. (define_expand "vec__v2qi" [(set (match_operand:V2QI 0 "nonimmediate_operand") (any_vecshift3:V2QI (match_operand:V2QI 1 "general_operand") (match_operand:QI 2 "nonmemory_operand")))] "" { gen_hi3 (simplify_gen_subreg (HImode, operands[0], V2QImode, 0), simplify_gen_subreg (HImode, operands[1], V2QImode, 0), operands[2]); DONE; }) ; aad imm: al = imm * ah + al, ah = 0 ; For the NEC V20/V30, imm is fixed at 10, thus "Iaa" instead of "i". ; CC isn't really clobbered. It is set according to the result in al. (define_insn "*muladdqi4" [(set (match_operand:QI 0 "register_operand" "=Ral") (plus:QI (mult:QI (match_operand:QI 1 "register_operand" "+%Rah") (match_operand:QI 2 "immediate_operand" "Iaa")) (match_operand:QI 3 "register_operand" "0"))) (set (match_dup 1) (const_int 0)) (clobber (reg:CC CC_REG))] "TARGET_AAD_IMM || INTVAL (operands[2]) == 10" "aad\t%2" ) (define_insn "*muladdqi4_clobber" [(set (match_operand:QI 0 "register_operand" "=Ral") (plus:QI (mult:QI (match_operand:QI 1 "register_operand" "%Rah") (match_operand:QI 2 "immediate_operand" "Iaa")) (match_operand:QI 3 "register_operand" "0"))) (clobber (match_scratch:QI 4 "=1")) (clobber (reg:CC CC_REG))] "TARGET_AAD_IMM || INTVAL (operands[2]) == 10" "aad\t%2" ) (define_insn "*muladdqi4_zero_extendqihi" [(set (match_operand:HI 0 "register_operand" "=a") (zero_extend:HI (plus:QI (mult:QI (match_operand:QI 1 "register_operand" "%Rah") (match_operand:QI 2 "immediate_operand" "Iaa")) (match_operand:QI 3 "register_operand" "Ral")))) (clobber (match_scratch:QI 4 "=1")) (clobber (match_scratch:QI 5 "=3")) (clobber (reg:CC CC_REG))] "TARGET_AAD_IMM || INTVAL (operands[2]) == 10" "aad\t%2" ) ; Two or three operand QImode 
multiplication. ; TODO: Use whatever is cheapest of imulb or mulb for the chosen cpu. (define_insn "mulqi3" [(set (match_operand:QI 0 "register_operand" "=Ral,Ral") (mult:QI (match_operand:QI 1 "register_operand" "%Rah,0") (match_operand:QI 2 "general_operand" "Iaa,qm"))) (clobber (match_scratch:QI 3 "=1,Rah")) (clobber (reg:CC CC_REG))] "" "@ xorb\t%0,\t%0\;aad\t%2 mulb\t%2" ) ; Two or three operand HImode multiplication. Use temporary pseudos to ; work around the tight constraints of *mulhi3. (define_expand "mulhi3" [(set (match_dup 5) (match_operand:HI 1 "nonimmediate_operand")) (parallel [ (set (match_dup 4) (mult:HI (match_dup 5) (match_operand:HI 2 "general_operand"))) (clobber (match_dup 3)) (clobber (reg:CC CC_REG)) ]) (set (match_operand:HI 0 "register_operand") (match_dup 4))] "" { if (TARGET_IMUL_IMM && CONSTANT_P (operands[2])) operands[3] = gen_rtx_SCRATCH (HImode); else operands[3] = gen_rtx_REG (HImode, D_REG); operands[4] = gen_reg_rtx (HImode); operands[5] = gen_reg_rtx (HImode); }) (define_insn "*mulhi3_const" [(set (match_operand:HI 0 "register_operand" "=q,r") (mult:HI (match_operand:HI 1 "nonimmediate_operand" "%0,rm") (match_operand:HI 2 "immediate_operand" "I11,sImu"))) (clobber (match_scratch:HI 3 "=X,X")) (clobber (reg:CC CC_REG))] "TARGET_IMUL_IMM" "@ addb\t%L0,\t%H0 imulw\t%2,\t%1,\t%0" ) ; We use general_operand rather than nonimmediate_operand even without ; TARGET_IMUL_IMM because reload can load the constant into a register, ; which the constraints accept. ; TODO: Use whatever is cheapest of imulw or mulw for the chosen cpu. (define_insn "*mulhi3" [(set (match_operand:HI 0 "single_register_operand" "=q,r,a") (mult:HI (match_operand:HI 1 "single_register_operand" "%0,r,0") (match_operand:HI 2 "general_operand" "I11,Imu,rm"))) (clobber (match_scratch:HI 3 "=X,X,d")) (clobber (reg:CC CC_REG))] "" "@ addb\t%L0,\t%H0 imulw\t%2,\t%1,\t%0 mulw\t%2" ) ; The middle end can synthesize mulsi3 from mulhi3 and (u)mulhisi3. 
; Widening multiplies (HIxHI->SI via "mulw" into %dx:%ax, QIxQI->HI via
; "mulb" into %ax), the "aam imm" unsigned QImode divmod-by-constant, and
; the signed QI/HI divmod using cbtw/cwtd + idiv.
; NOTE(review): "<...>" substitutions stripped by extraction -- e.g.
; "divmod4" and the empty "=" / "=&" / "ct" strings are missing their
; <mode>/<div_out>/<mod_out>/<S> pieces; restore from the original ia16.md.
; One operand expanding HImode signed/unsigned multiplication. (define_expand "mulhisi3" [(set (match_dup 3) (match_operand:HI 1 "register_operand")) (parallel [ (set (match_operand:SI 0 "register_operand") (mult:SI (any_extend:SI (match_dup 3)) (any_extend:SI (match_operand:HI 2 "nonimmediate_operand")))) (clobber (reg:CC CC_REG))] )] "" { operands[3] = gen_reg_rtx (HImode); }) (define_insn "*mulhisi3" [(set (match_operand:SI 0 "register_operand" "=A") (mult:SI (any_extend:SI (match_operand:HI 1 "single_register_operand" "%0")) (any_extend:SI (match_operand:HI 2 "nonimmediate_operand" "rm")))) (clobber (reg:CC CC_REG))] "" "mulw\t%2" ) ; One operand expanding QImode signed/unsigned multiplication. (define_insn "mulqihi3" [(set (match_operand:HI 0 "register_operand" "=a") (mult:HI (any_extend:HI (match_operand:QI 1 "register_operand" "%0")) (any_extend:HI (match_operand:QI 2 "nonimmediate_operand" "rm")))) (clobber (reg:CC CC_REG))] "" "mulb\t%2" ) ; One operand QImode division by a constant. ; aam imm: ah = al / imm, al = al % imm. ; This should work on the NEC V20/V30, unlike aad. ; TODO: SF, ZF and PF are set according to the remainder in %al of the division. (define_insn "*divmodqi4_const" [(set (match_operand:QI 0 "register_operand" "=Rah") (udiv:QI (match_operand:QI 1 "register_operand" "Ral") (match_operand:QI 2 "immediate_operand" "i"))) (set (match_operand:QI 3 "register_operand" "=1") (umod:QI (match_dup 1) (match_dup 2))) (clobber (reg:CC CC_REG))] "" "aam\t%2" ) ; One operand QI/HImode signed division. (define_insn "divmod4" [(set (match_operand:MO 0 "register_operand" "=") (div:MO (match_operand:MO 1 "register_operand" "0") (match_operand:MO 2 "nonimmediate_operand" "m"))) (set (match_operand:MO 3 "register_operand" "=&") (mod:MO (match_dup 1) (match_dup 2))) (clobber (reg:CC CC_REG))] "" "ct\;idiv\t%2" ) ; One operand QI/HImode unsigned division. 
; Unsigned QI/HI divmod (zero the high part, then "div"), followed by the
; arithmetic-right-shift expanders. ashrhi3/ashrqi3 route shifts by >= 15
; (resp. 7) or by 8 through fresh pseudos so the tight cwtd/cbtw-based
; constant patterns reload cleanly; the ashrqi3 C condition spans the next
; source line. Then the const15/const8 patterns themselves.
; NOTE(review): "<...>" tags stripped by extraction -- "udivmod4",
; "gen__ashrhi3" etc. are missing <mode>/<div_out> pieces; restore from the
; original ia16.md.
(define_insn "udivmod4" [(set (match_operand:MO 0 "register_operand" "=") (udiv:MO (match_operand:MO 1 "register_operand" "0") (match_operand:MO 2 "nonimmediate_operand" "m"))) (set (match_operand:MO 3 "register_operand" "=&") (umod:MO (match_dup 1) (match_dup 2))) (clobber (reg:CC CC_REG))] "" "xor\t%3,\t%3\;div\t%2" ) ;; Shifts and rotates. ; Two operand QI/HImode arithmetic/logic shifts and rotates. ; Two constant arithmetic right shifts need special magic to work around ; reload problems with multiword registers and tight constraints. (define_expand "ashrhi3" [(set (match_dup 4) (match_operand:HI 1 "nonimmediate_operand")) (set (match_dup 3) (ashiftrt:HI (match_dup 4) (match_operand:QI 2 "nonmemory_operand"))) (set (match_operand:HI 0 "nonimmediate_operand") (match_dup 3))] "" { if (CONST_INT_P (operands[2]) && (INTVAL (operands[2]) >= 15 || INTVAL (operands[2]) == 8)) { operands[3] = gen_reg_rtx (HImode); operands[4] = gen_reg_rtx (HImode); if (INTVAL (operands[2]) > 15) operands[2] = GEN_INT (15); } else { emit_insn (gen__ashrhi3 (operands[0], operands[1], operands[2])); DONE; } }) (define_expand "ashrqi3" [(set (match_dup 4) (match_operand:QI 1 "nonimmediate_operand")) (set (match_dup 3) (ashiftrt:QI (match_dup 4) (match_operand:QI 2 "nonmemory_operand"))) (set (match_operand:QI 0 "nonimmediate_operand") (match_dup 3))] "" { if (! 
CONST_INT_P (operands[2]) || INTVAL (operands[2]) < 7) { emit_insn (gen__ashrqi3 (operands[0], operands[1], operands[2])); DONE; } operands[3] = gen_reg_rtx (QImode); operands[4] = gen_reg_rtx (QImode); if (INTVAL (operands[2]) > 7) operands[2] = GEN_INT (7); }) (define_insn "*ashrhi3_const15" [(set (match_operand:HI 0 "single_register_operand" "=d") (ashiftrt:HI (match_operand:HI 1 "single_register_operand" "a") (const_int 15)))] "" "cwtd" ) (define_insn_and_split "*ashrhi3_const8" [(set (match_operand:HI 0 "single_register_operand" "=a") (ashiftrt:HI (match_operand:HI 1 "general_operand" "qmi") (const_int 8)))] "" "#" "reload_completed" [(set (match_dup 2) (match_dup 3)) (set (match_dup 0) (sign_extend:HI (match_dup 2)))] { operands[2] = simplify_gen_subreg (QImode, operands[0], HImode, 0); operands[3] = simplify_gen_subreg (QImode, operands[1], HImode, 1); }) (define_insn "*lshrhi3_const8_mulhi3_const257" [(set (match_operand:HI 0 "register_operand" "=q") (mult:HI (lshiftrt:HI (match_operand:HI 1 "register_operand" "0") (const_int 8)) (const_int 257)))] "" "movb\t%H0,\t%L0" ) ; TODO: This should be a splitter. 
; Byte-move idioms for (x >> 8) & 255, the cbtw/cwtd sign-fill shifts by 7,
; a sign-compare-against-zero helper in CCS_NC mode, and the logical shift
; right by 15 (sign bit to bit 0) with its post-reload decomposition into
; compare + scc + increment.
(define_insn "*lshrhi_const8_andhi_const255" [(set (match_operand:HI 0 "register_operand" "=q") (and:HI (lshiftrt:HI (match_operand:HI 1 "general_operand" "qmi") (const_int 8)) (const_int 255))) (clobber (reg:CC CC_REG))] "" "movb\t%H1,\t%L0\;xorb\t%H0,\t%H0" ) (define_insn "*ashrqi3_const7" [(set (match_operand:QI 0 "single_register_operand" "=Rah") (ashiftrt:QI (match_operand:QI 1 "single_register_operand" "Ral") (const_int 7)))] "" "cbtw" ) (define_insn "*ashrqi3_const7_extendqihi" [(set (match_operand:HI 0 "single_register_operand" "=d") (sign_extend:HI (ashiftrt:QI (match_operand:QI 1 "single_register_operand" "Rah") (const_int 7))))] "" "cwtd" ) (define_insn "*cmphi2_const0_ccs_nc" [(set (reg:CCS_NC CC_REG) (compare:CCS_NC (match_operand:HI 0 "nonimmediate_operand" "qm,r") (const_int 0)))] "" "@ cmpb\t$0x80,\t%H0 cmpw\t$0x8000,\t%0" ) (define_insn_and_split "*lshrhi3_const15" [(set (match_operand:HI 0 "register_operand" "=d,r,r,r") (lshiftrt:HI (match_operand:HI 1 "nonimmediate_operand" "a,0,qm,r") (const_int 15))) (clobber (reg:CC CC_REG))] "" "@ cwtd\;negw\t%0 shlw\t%0\;sbbw\t%0,\t%0\;negw\t%0 # #" "reload_completed && (REGNO (operands[0]) != D_REG || !REG_P (operands[1]) || REGNO (operands[1]) != A_REG) && !rtx_equal_p (operands[0], operands[1])" [(set (reg:CCS_NC CC_REG) (compare:CCS_NC (match_dup 1) (const_int 0))) (parallel [ (set (match_dup 0) (ge:HI (reg:CCS_NC CC_REG) (const_int 0))) (clobber (reg:CC CC_REG)) ]) (parallel [ (set (match_dup 0) (plus:HI (match_dup 0) (const_int 1))) (clobber (reg:CC CC_REG))] )] ) ; For newlib/libc/stdlib/ldtoa.c. 
; Combined (x >> 15) + y pattern, rotate-by-8 as "xchgb", and the general
; shift expander, which may use ia16_prepare_operands()/a temporary and
; forces non-immediate shift counts into a register when the CPU lacks
; immediate shift counts (pre-80186).
; NOTE(review): "<...>" tags stripped by extraction -- the expander name
; "3" and "gen_3"/"ia16_prepare_operands (, ...)" are missing their
; <shift_insn>/<CODE> substitutions; restore from the original ia16.md.
(define_insn "*lshrhi_const15_addhi3" [(set (match_operand:HI 0 "nonimmediate_operand" "=r,rm,rm") (plus:HI (lshiftrt:HI (match_operand:HI 2 "nonimmediate_operand" "0,qm,r") (const_int 15)) (match_operand:HI 1 "nonimmediate_operand" "0,0,0"))) (clobber (reg:CC CC_REG))] "" "@ sarw\t%0\;rclw\t%0\;adcw\t$0,\t%0 cmpb\t$0x80,\t%H2\;cmc\;adcw\t$0,\t%0 cmpw\t$0x8000,\t%2\;cmc\;adcw\t$0,\t%0" ) ; (define_split_and_insn ...) anyone? (define_split [(set (match_operand:HI 0 "register_operand") (rotate:HI (match_operand:HI 1 "register_operand") (const_int 8))) (clobber (reg:CC CC_REG))] "!reload_completed" [(set (match_dup 0) (rotate:HI (match_dup 1) (const_int 8)))] "" ) (define_insn "*rotlhi3_const8" [(set (match_operand:HI 0 "register_operand" "=q") (rotate:HI (match_operand:HI 1 "register_operand" "0") (const_int 8)))] "" "xchgb\t%L0,\t%H0" ) ; The other shifts may need to use the %cl register. Use the single_register ; trick here too. Note: expands into _ashr instead of ashr. (define_expand "3" [(parallel [ (set (match_operand:MO 0 "nonimmediate_operand") (any_shift:MO (match_operand:MO 1 "nonimmediate_operand") (match_operand:QI 2 "nonmemory_operand"))) (clobber (reg:CC CC_REG))] )] "" { rtx tmp; if ((tmp = ia16_prepare_operands (, operands))) { emit_insn (gen_3 (tmp, operands[1], operands[2])); emit_move_insn (operands[0], tmp); DONE; } if (operands[2] != const1_rtx && !TARGET_SHIFT_IMM && !REG_P (operands[2])) operands[2] = force_reg (QImode, operands[2]); }) ; Note: expands into _ashr instead of ashr. ; C is %cl and Ish is any imm if i80186+, else 1. 
; The shift insns proper: a subreg-of-%cx variant, the plain %cl/immediate
; form, and the flag-setting (_cc / _cconly) variants, which require a
; non-zero constant count because a shift by 0 leaves the flags unchanged.
; NOTE(review): heavily affected by the stripped "<...>" tags -- names like
; "*3_subreg", empty conditions "() && ..." and templates "\t%2,\t%0" are
; missing their <shift_insn>/<shift>/<s> pieces; restore from ia16.md.
(define_insn "*3_subreg" [(set (match_operand:MO 0 "nonimmediate_operand" "=m") (any_shift:MO (match_operand:MO 1 "nonimmediate_operand" "0") (subreg:QI (match_operand:MO 2 "single_register_operand" "c") 0))) (clobber (reg:CC CC_REG))] "() && ia16_arith_operands_p (, operands)" "\t%%cl,\t%0" ) (define_insn "*3" [(set (match_operand:MO 0 "nonimmediate_operand" "=m") (any_shift:MO (match_operand:MO 1 "nonimmediate_operand" "0") (match_operand:QI 2 "single_nonmemory_operand" "RclIsh"))) (clobber (reg:CC CC_REG))] "() && ia16_arith_operands_p (, operands)" "\t%2,\t%0" ) ; FIXME: Don't we want here and below also? ; Shifts by 0 don't set flags. So variable shifts need some kind of support ; for getting the information from the VRP pass. ; Only shifts of one bit set the overflow flag. ; TODO: We likely want a _subreg version for operand2 (see e.g. pushqi1_subreg). (define_insn "*3_cc_" [(set (reg:SET_SCZ CC_REG) (compare:SET_SCZ (any_shiftcc:MO (match_operand:MO 1 "nonimmediate_operand" "0") (match_operand:QI 2 "const_int_operand" "RclIsh")) (const_int 0))) (set (match_operand:MO 0 "nonimmediate_operand" "=m") (any_shiftcc:MO (match_dup 1) (match_dup 2)))] "INTVAL (operands[2]) != 0 && ia16_arith_operands_p (, operands)" "\t%2,\t%0" ) (define_insn "*3_cconly_" [(set (reg:SET_SCZ CC_REG) (compare:SET_SCZ (any_shiftcc:MO (match_operand:MO 1 "nonimmediate_operand" "0") (match_operand:QI 2 "const_int_operand" "RclIsh")) (const_int 0))) (clobber (match_scratch:MO 0 "=m"))] "INTVAL (operands[2]) != 0 && ia16_arith_operands_p (, operands)" "\t%2,\t%0" ) ; Shift operands are promoted to "unsigned int" in C. ; TODO: An insn for ashiftrt. 
; QImode shifts seen through C's integer promotion (HImode shift of a
; zero-extended byte, truncated back to QI), the combine split that peels
; off an outer zero_extend into an explicit AND #255, and a two-register
; "shift left 32-bit by 1" idiom observed in real output.
; NOTE(review): stripped "<...>" tags here as well (empty "() && ..."
; conditions, "b\t%2,\t%0" template missing its <shift> prefix) -- restore
; from the original ia16.md.
(define_insn "*qi_promoted" [(set (match_operand:QI 0 "nonimmediate_operand" "=qm") (subreg:QI (any_lshift:HI (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "0")) (match_operand:QI 2 "single_nonmemory_operand" "RclIsh")) 0)) (clobber (reg:CC CC_REG))] "() && ia16_arith_operands_p (, operands)" "b\t%2,\t%0" ) ; This is for combine, to split off the outer zero_extend. ; TODO: Mem:QI -> mem:HI would be rejected by ia16_arith_operands_p(). (define_split [(set (match_operand:HI 0 "register_operand") (zero_extend:HI (subreg:QI (any_lshift:HI (zero_extend:HI (match_operand:QI 1 "register_operand")) (match_operand:QI 2 "single_nonmemory_operand")) 0))) (clobber (reg:CC CC_REG))] "() && ia16_arith_operands_p (, operands)" [(parallel [(set (match_dup 3) (subreg:QI (any_lshift:HI (zero_extend:HI (match_dup 1)) (match_dup 2)) 0)) (clobber (reg:CC CC_REG)) ]) (parallel [(set (match_dup 0) (and:HI (match_dup 0) (const_int 255))) (clobber (reg:CC CC_REG)) ])] { operands[3] = simplify_gen_subreg (QImode, operands[0], HImode, 0); }) ; Seen in a disassembly of regexp.o. Let combine fix it. ; mov -396(%bp),%ax ; mov $0xf,%cl ; shr %cl,%ax ; mov -396(%bp),%cx ; shl %cx (define_insn "*lshlsi_const1_twoparts" [(set (match_operand:HI 0 "register_operand" "=r") (ashift:HI (match_operand:HI 1 "register_operand" "0") (const_int 15))) (set (match_operand:HI 2 "register_operand" "=&r") (lshiftrt:HI (match_dup 1) (const_int 1))) (clobber (reg:CC CC_REG))] "" "xorw\t%2,%2\;shlw\t%0\;rclw\t%2" ) ; One operand V2QI/QI/HImode not. 
; Bitwise NOT for all <=16-bit modes (the expander forbids mem->mem unless
; source and destination are the same location), the abs expansion as
; "sign-mask xor then subtract", and the cmp expander, which only records
; its operands in ia16_cmp_op0/1 for the later scond/branch expanders.
; NOTE(review): "<...>" tags stripped by extraction (e.g. "one_cmpl2",
; "abs2", "cmp" are missing their <mode> suffixes, and the "not\t%0"
; template its <s> size letter) -- restore from the original ia16.md.
(define_expand "one_cmpl2" [(set (match_operand:LE16 0 "nonimmediate_operand") (not:LE16 (match_operand:LE16 1 "nonimmediate_operand")))] "" { rtx tmp; if (MEM_P (operands[1]) && !rtx_equal_p (operands[0], operands[1])) operands[1] = force_reg (mode, operands[1]); if (MEM_P (operands[0]) && !MEM_P (operands[1])) { tmp = gen_reg_rtx (mode); emit_insn (gen_one_cmpl2 (tmp, operands[1])); emit_move_insn (operands[0], tmp); DONE; } }) (define_insn "*one_cmpl2" [(set (match_operand:LE16 0 "nonimmediate_operand" "=m") (not:LE16 (match_operand:LE16 1 "nonimmediate_operand" "0")))] "(!MEM_P (operands[0]) && !MEM_P (operands[1])) || rtx_equal_p (operands[0], operands[1])" "not\t%0" ) ; This is very similiar to what the middle end will generate, but for some ; reason the register allocator likes this better when the expression was ; (neg (abs (...)) before expand and combine swaps the operands of MINUS. (define_expand "abs2" [(set (match_dup 3) (match_operand:MO 1 "general_operand")) (set (match_dup 2) (ashiftrt:MO (match_dup 3) (match_dup 4))) (parallel [ (set (match_operand:MO 0 "nonimmediate_operand") (xor:MO (match_dup 3) (match_dup 2))) (clobber (reg:CC CC_REG)) ]) (parallel [ (set (match_dup 0) (minus:MO (match_dup 0) (match_dup 2))) (clobber (reg:CC CC_REG))] )] "" { operands[2] = gen_reg_rtx (mode); operands[3] = gen_reg_rtx (mode); operands[4] = GEN_INT (GET_MODE_BITSIZE (mode) - 1); }) ; The template is a dummy. (define_expand "cmp" [(set (reg:CC CC_REG) (compare:CC (match_operand:MO 0 "nonimmediate_operand") (match_operand:MO 1 "general_operand")))] "" { ia16_cmp_op0 = operands[0]; ia16_cmp_op1 = operands[1]; DONE; }) ; EQ/NE comparisons are usually best done in CCZ_C or CCZ_NC mode. 
; The comparison insns: EQ/NE against 0 and -1 via carry-mode tricks, the
; LEU/GTU "compare with c+1" CCCZ_C trick (overflow of c+1 is OK), plain
; compares against zero ("and x,x" alternative avoids an immediate), the
; full flag-setting compare, and a splitter turning a zero_extract test
; into an AND with the computed bit mask.
; NOTE(review): stripped "<...>" tags (missing <mode>/<s> in names and
; "cmp\t..." templates, empty constraint alternatives) -- restore from the
; original ia16.md.
(define_insn "*cmp_const0_ccz_c" [(set (reg:CCZ_C CC_REG) (compare:CCZ_C (match_operand:MO 0 "nonimmediate_operand" "m") (const_int 0)))] "" "cmp\t$1,\t%0" ) (define_insn "*cmp_constm1_ccz_nc" [(set (reg:CCZ_NC CC_REG) (compare:CCZ_NC (match_operand:MO 0 "nonimmediate_operand" "m") (const_int -1)))] "" "cmp\t$-1,\t%0" ) ; LEU and GTU comparisons against constants can use the more favorable ; CCCZ_Cmode instead of CCCZmode. We really use "a < c+1" instead of "a <= c" ; and "a >= c+1" instead of "a > c". It is ok for c+1 to overflow. (define_insn "*cmp_cccz_c" [(set (reg:CCCZ_C CC_REG) (compare:CCCZ_C (match_operand:MO 0 "nonimmediate_operand" ",m,m") (match_operand:MO 1 "immediate_operand" "M1,M1,i")))] "" "@ and\t%0,\t%0 cmp\t$0,\t%0 cmp\t%1+1,\t%0" ) ; Comparisons against zero. They don't need to set the overflow flag. (define_insn "*cmp_const0_" [(set (reg:SET_SCZ CC_REG) (compare:SET_SCZ (match_operand:MO 0 "nonimmediate_operand" ",m") (const_int 0)))] "" "@ and\t%0,\t%0 cmp\t$0,\t%0" ) ; The standard comparison insn, setting all flags. (define_insn "*cmp_" [(set (reg:SET_SOCZ CC_REG) (compare:SET_SOCZ (match_operand:MO 0 "nonimmediate_operand" ",,m") (match_operand:MO 1 "general_operand" "Z,,i")))] "!MEM_P (operands[0]) || !MEM_P (operands[1])" "@ and\t%0,\t%0 cmp\t%1,\t%0 cmp\t%1,\t%0" ) ; Combine seems to be unable to get this right expressed with AND. (define_insn_and_split "*extzv_cconly_ccz" [(set (reg:CCZ CC_REG) (compare:CCZ (zero_extract (match_operand:MO 0 "nonimmediate_operand" "m") (match_operand:QI 1 "const_int_operand" "n") (match_operand:QI 2 "const_int_operand" "n")) (const_int 0)))] "" "#" "" [(set (reg:CCZ CC_REG) (compare:CCZ (and:MO (match_dup 0) (match_dup 3)) (const_int 0)))] { operands[3] = gen_int_mode (((1 << INTVAL (operands[1])) - 1) << INTVAL (operands[2]), mode); }) ; The sign extension patterns have expanders which make HI/QImode pseudos to ; work around register allocation problems. 
; Sign extension (cwtd for HI->SI into %dx:%ax, cbtw for QI->HI into %ax),
; each with an expander that introduces fresh pseudos to dodge RA problems,
; and zero_extendqihi2 expanded as an AND #255 on a wider pseudo instead of
; GCC's default shift pair.
; TODO Why are extendhisi2 and extendqihi2 so different? (define_expand "extendhisi2" [(set (match_dup 2) (match_operand:HI 1 "register_operand")) (set (match_operand:SI 0 "register_operand") (sign_extend:SI (match_dup 2)))] "" { operands[2] = gen_reg_rtx (HImode); }) (define_insn "*extendhisi2" [(set (match_operand:SI 0 "register_operand" "=A") (sign_extend:SI (match_operand:HI 1 "single_register_operand" "0")))] "" "cwtd" ) (define_expand "extendqihi2" [(set (match_dup 3) (match_operand:QI 1 "register_operand")) (set (match_dup 2) (sign_extend:HI (match_dup 3))) (set (match_operand:HI 0 "register_operand") (match_dup 2))] "" { operands[2] = gen_reg_rtx (HImode); operands[3] = gen_reg_rtx (QImode); }) (define_insn "*extendqihi2" [(set (match_operand:HI 0 "single_register_operand" "=a") (sign_extend:HI (match_operand:QI 1 "register_operand" "0")))] "" "cbtw" ) ; GCC convert_move() handles zero extension to modes larger than a word just ; fine. For smaller modes, however, it defaults to a pair of unsigned shifts. ; We want to use an AND instruction instead. (define_expand "zero_extendqihi2" [(set (match_dup 2) (match_operand:QI 1 "general_operand")) (parallel [ (set (match_dup 3) (and:HI (match_dup 3) (const_int 255))) (clobber (reg:CC CC_REG)) ]) (set (match_operand:HI 0 "nonimmediate_operand") (match_dup 3))] "" { operands[3] = gen_reg_rtx (HImode); operands[2] = simplify_gen_subreg (QImode, operands[3], HImode, 0); }) ; Combine prefers ZERO_EXTEND over AND. Undo this transformation. 
; Splitter turning combine's ZERO_EXTEND back into AND #255 (careful about
; in-place zero extension of a memory location), followed by the QImode
; scond machinery (STORE_FLAG_VALUE == -1): the expander picks a CC mode
; via ia16_gen_compare_reg() and routes carry-only conditions to the cheap
; "sbb" patterns, others to the lahf-based scratch sequence (its C comment
; spans the next source line). Then the carry scc patterns themselves.
; NOTE(review): stripped "<...>" tags -- "s", "gen__scond", "( == LT)" etc.
; are missing their <scc_cond>/<code> substitutions; restore from ia16.md.
(define_insn_and_split "*zero_extendqihi2" [(set (match_operand:HI 0 "nonimmediate_operand" "=r") (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "0"))) (clobber (reg:CC CC_REG))] "(!MEM_P (operands[0]) && !MEM_P (operands[1])) || (MEM_P (operands[0]) && MEM_P (operands[1]) && rtx_equal_p (XEXP (operands[0], 0), XEXP (operands[1], 0)))" "#" "" [(parallel [(set (match_dup 0) (and:HI (match_dup 2) (const_int 255))) (clobber (reg:CC CC_REG))] )] { /* We may be zero extending directly in memory and must not generate (set (mem:HI x) (and:HI (subreg:HI (mem:QI x) 0))). */ if (MEM_P (operands[0]) && MEM_P (operands[1]) && rtx_equal_p (XEXP (operands[0], 0), XEXP (operands[1], 0))) operands[2] = widen_memory_access (operands[1], HImode, 0); else operands[2] = simplify_gen_subreg (HImode, operands[1], QImode, 0); }) ;; The scond patterns. IA-16 only has limited capabilities. ; The following assumes STORE_FLAG_VALUE == -1. ; We expand using QImode instead of HImode because ; 1) QImode is requested for things like (a == 2 | b). ; 2) Combine can optimize scondqi + sign extension into scondhi if needed ; but not optimize away a truncation. ; The template is a dummy. (define_expand "s" [(parallel [(set (match_operand:QI 0 "register_operand") (scc_cond:QI (reg:CC CC_REG) (const_int 0))) (clobber (reg:CC CC_REG))] )] "" { rtx cc_reg, op1; enum machine_mode cc_mode; cc_reg = ia16_gen_compare_reg (, ia16_cmp_op0, ia16_cmp_op1, false); cc_mode = GET_MODE (cc_reg); op1 = gen_rtx_fmt_ee (, QImode, cc_reg, const0_rtx); /* We can do anything which tests only the carry flag. */ if (carry_flag_operator (op1, QImode) || carry_not_flag_operator (op1, QImode)) emit_insn (gen__scond (operands[0], op1)); /* We can do slt as long as we don't need the overflow flag. */ else if (( == LT && ia16_cc_modes_compatible (CCSCZmode, cc_mode) == CCSCZmode) /* We can do seq as long as the zero flag is set. 
*/ || ( == EQ && ia16_cc_modes_compatible (CCZmode, cc_mode) == cc_mode)) emit_insn (gen__scond_scratch (operands[0], op1)); else FAIL; DONE; }) ; Expand scond operations which don't need a scratch register. (define_expand "_scond" [(parallel [(set (match_operand:QI 0 "register_operand") (match_operand:QI 1)) (clobber (reg:CC CC_REG))] )] ) ; Expand scond operations needing scratch registers: ; seq from zero flag and slt from sign flag. ; TODO: Expand this sequence properly. (define_expand "_scond_scratch" [(parallel [ (set (match_dup 4) (match_operand:QI 1)) (clobber (match_scratch:QI 2)) (clobber (match_scratch:QI 3)) (clobber (reg:CC CC_REG)) ]) (set (match_operand:QI 0 "register_operand") (match_dup 4))] "" { operands[4] = gen_reg_rtx (QImode); }) ; GAS does not know about salc (opcode 0xd6), also known as setalc or setcal. :-( (define_insn "*scond_carry" [(set (match_operand:MO 0 "register_operand" "=") (match_operand:MO 1 "carry_flag_operator")) (clobber (reg:CC CC_REG))] "" "sbb\t%0,\t%0" ) ; This is only to help combine build up complex instructions. (define_insn_and_split "*scond_carry_neg" [(set (match_operand:MO 0 "register_operand" "=") (neg:MO (match_operand:MO 1 "carry_flag_operator"))) (clobber (reg:CC CC_REG))] "" "#" "" [(parallel [(set (match_dup 0) (match_dup 1)) (clobber (reg:CC CC_REG)) ]) (parallel [ (set (match_dup 0) (minus:MO (const_int 0) (match_dup 0))) (clobber (reg:CC CC_REG))] )] ) (define_insn_and_split "*scond_carry_not" [(set (match_operand:MO 0 "register_operand" "=") (match_operand:MO 1 "carry_not_flag_operator")) (clobber (reg:CC CC_REG))] "" "#" "" [(set (reg:CCC CC_REG) (unspec:CCC [(match_dup 2)] UNSPEC_NOT_CARRY)) (parallel [(set (match_dup 0) (ltu:MO (reg:CCC CC_REG) (const_int 0))) (clobber (reg:CC CC_REG))] )] { operands[2] = XEXP (operands[1], 0); }) ; Split "cmc; sbb reg,reg; neg reg" into "sbb reg,reg; inc reg". 
; Strength-reduce negated not-carry scc by reversing the condition (sbb then
; inc instead of cmc/sbb/neg), the "cmc" carry-complement unspec, and the
; lahf-based slt/seq patterns that copy the sign or (shifted) zero flag out
; of %ah and sign-fill with cwtd/cbtw.
; NOTE(review): empty "=" constraints and "*sltqi_"/"*seqqi_" names are
; missing their stripped "<...>" substitutions; restore from ia16.md.
(define_insn_and_split "*scc_carry_not_neg" [(set (match_operand:MO 0 "register_operand" "=") (neg:MO (match_operand:MO 1 "carry_not_flag_operator"))) (clobber (reg:CC CC_REG))] "" "#" "" [(parallel [(set (match_dup 0) (match_dup 2)) (clobber (reg:CC CC_REG)) ]) (parallel [ (set (match_dup 0) (plus:MO (match_dup 0) (const_int 1))) (clobber (reg:CC CC_REG))] )] { operands[2] = gen_rtx_fmt_ee (reverse_condition (GET_CODE (operands[1])), mode, XEXP (operands[1], 0), XEXP (operands[1], 1)); }) (define_insn "*one_cmplccc2" [(set (reg:CCC CC_REG) (unspec:CCC [(reg CC_REG)] UNSPEC_NOT_CARRY))] "" "cmc" ) ; TODO: *sltsi (define_insn "*sltqi_" [(set (match_operand:QI 0 "register_operand" "=Rdl,?Rah") (lt:QI (reg:USE_S CC_REG) (const_int 0))) (clobber (match_scratch:QI 1 "=Rah,Ral")) (clobber (match_scratch:QI 2 "=Rdh,X")) (clobber (reg:CC CC_REG))] "" "@ lahf\;cwtd lahf\;movb\t%0,\t%1\;cbtw" ) (define_insn "*slthi_" [(set (match_operand:HI 0 "single_register_operand" "=d") (lt:HI (reg:USE_S CC_REG) (const_int 0))) (clobber (match_scratch:QI 1 "=Rah")) (clobber (reg:CC CC_REG))] "" "lahf\;cwtd" ) (define_insn "*seqqi_" [(set (match_operand:QI 0 "register_operand" "=Rdl,?Rah") (eq:QI (reg:USE_Z CC_REG) (const_int 0))) (clobber (match_scratch:QI 1 "=Rah,Ral")) (clobber (match_scratch:QI 2 "=Rdh,X")) (clobber (reg:CC CC_REG))] "" "@ lahf\;shlb\t%1\;cwtd lahf\;shlb\t%0\;movb\t%0,\t%1\;cbtw" ) (define_insn "*seqhi_" [(set (match_operand:HI 0 "single_register_operand" "=d") (eq:HI (reg:USE_Z CC_REG) (const_int 0))) (clobber (match_scratch:QI 1 "=Rah")) (clobber (reg:CC CC_REG))] "" "lahf\;shlb\t%1\;cwtd" ) ; "A" means dx:ax, "j" means bx:dx. 
; SImode seq: compute the HImode result in %dx and copy it to the pair's
; other half (%ax or %bx depending on which register pair was allocated).
; Then parity: parityhi2 first XORs the two bytes together (via the
; cc-setting helper at the end) so the parity flag reflects the whole word,
; and parityqi2 just compares against zero.
(define_insn_and_split "*seqsi_" [(set (match_operand:SI 0 "register_operand" "=A,j") (eq:SI (reg:USE_Z CC_REG) (const_int 0))) (clobber (match_scratch:QI 1 "=X,Rah")) (clobber (reg:CC CC_REG))] "" "#" "reload_completed" [(parallel [(set (reg:HI D_REG) (eq:HI (reg:USE_Z CC_REG) (const_int 0))) (clobber (reg:QI AH_REG)) (clobber (reg:CC CC_REG)) ]) (set (match_dup 2) (reg:HI D_REG))] { if (REGNO (operands[0]) == A_REG) operands[2] = gen_rtx_REG (HImode, A_REG); else if (REGNO (operands[0]) == D_REG) operands[2] = gen_rtx_REG (HImode, B_REG); else gcc_unreachable (); }) (define_expand "parityhi2" [(set (match_dup 2) (match_operand:HI 1 "general_operand")) (parallel [ (set (reg:CC CC_REG) (compare:CC (xor:HI (and:HI (match_dup 2) (const_int -256)) (ashift:HI (match_dup 2) (const_int 8))) (const_int 0))) (clobber (match_dup 2)) ]) (parallel [ (set (match_operand:HI 0 "register_operand") (parity:HI (reg:CC CC_REG))) (clobber (match_scratch:QI 2)) (clobber (reg:CC CC_REG))] )] "" { operands[2] = gen_reg_rtx (HImode); }) (define_expand "parityqi2" [(set (reg:CC CC_REG) (compare:CC (match_operand:QI 1 "general_operand") (const_int 0))) (parallel [ (set (match_operand:QI 0 "register_operand") (parity:QI (reg:CC CC_REG))) (clobber (match_scratch:QI 2)) (clobber (reg:CC CC_REG))] )] ) (define_insn "*cmphi_const0_cconly_for_parity" [(set (reg:CC CC_REG) (compare:CC (xor:HI (and:HI (match_operand:HI 0 "register_operand" "=q") (const_int -256)) (ashift:HI (match_dup 0) (const_int 8))) (const_int 0))) (clobber (match_dup 0))] "" "xorb\t%L0,\t%H0" ) ; The parity flag is set for even parity, i.e. when we want to return 0. 
(define_insn "*parityhi2" [(set (match_operand:HI 0 "register_operand" "=a,r") (parity:HI (reg:CC CC_REG))) (clobber (match_scratch:QI 1 "=X,Rah")) (clobber (reg:CC CC_REG))] "" "@ lahf\;andb\t$0x4,\t%H0\;addb\t$0xfc,\t%H0\;sbbw\t%0,\t%0\;incw\t%0 lahf\;andb\t$0x4,\t%1\;addb\t$0xfc,\t%1\;sbbw\t%0,\t%0\;incw\t%0" ) (define_insn "*parityqi2" [(set (match_operand:QI 0 "register_operand" "=Rah,q") (parity:QI (reg:CC CC_REG))) (clobber (match_scratch:QI 1 "=X,Rah")) (clobber (reg:CC CC_REG))] "" "@ lahf\;andb\t$0x4,\t%0\;addb\t$0xfc,\t%0\;sbbb\t%0,\t%0\;incb\t%0 lahf\;andb\t$0x4,\t%1\;addb\t$0xfc,\t%1\;sbbb\t%0,\t%0\;incb\t%0" ) ; Various conditional branches. (define_expand "b" [(set (pc) (if_then_else (any_cond (match_dup 1) (const_int 0)) (label_ref (match_operand 0)) (pc)))] "" { operands[1] = ia16_gen_compare_reg (, ia16_cmp_op0, ia16_cmp_op1, true); }) ; Branches using only the carry flag. (define_insn "*bcond_carry" [(set (pc) (if_then_else (match_operand 1 "carry_flag_operator") (label_ref (match_operand 0)) (pc)))] "" "jc\t%l0" ) (define_insn "*bcond_carry_not" [(set (pc) (if_then_else (match_operand 1 "carry_not_flag_operator") (label_ref (match_operand 0)) (pc)))] "" "jnc\t%l0" ) ; Branches using the zero flag. (define_insn "*b_" [(set (pc) (if_then_else (any_cond_z (reg:USE_Z CC_REG) (const_int 0)) (label_ref (match_operand 0)) (pc)))] "" "\t%l0" ) ; Branches using the carry and zero flags. (define_insn "*b_" [(set (pc) (if_then_else (any_cond_cz (reg:USE_CZ CC_REG) (const_int 0)) (label_ref (match_operand 0)) (pc)))] "" "\t%l0" ) ; Branches using the sign and overflow flags. (define_insn "*b_" [(set (pc) (if_then_else (any_cond_so (reg:USE_SO CC_REG) (const_int 0)) (label_ref (match_operand 0)) (pc)))] "" "\t%l0" ) ; Branches using the sign, overflow and zero flags. (define_insn "*b_" [(set (pc) (if_then_else (any_cond_soz (reg:USE_SOZ CC_REG) (const_int 0)) (label_ref (match_operand 0)) (pc)))] "" "\t%l0" ) ; Branches using only the sign flag. 
(define_insn "*blt_" [(set (pc) (if_then_else (lt (reg:USE_S CC_REG) (const_int 0)) (label_ref (match_operand 0)) (pc)))] "" "js\t%l0" ) (define_insn "*bge_" [(set (pc) (if_then_else (ge (reg:USE_S CC_REG) (const_int 0)) (label_ref (match_operand 0)) (pc)))] "" "jns\t%l0" ) ; Docs ("13.11 Interdependence of Patterns") say we should have a reverse- ; conditional branch for each conditional branch. ; Branches using only the carry flag. (define_insn "*bcond_carry_rev" [(set (pc) (if_then_else (match_operand 1 "carry_flag_operator") (pc) (label_ref (match_operand 0))))] "" "jnc\t%l0" ) (define_insn "*bcond_carry_not_rev" [(set (pc) (if_then_else (match_operand 1 "carry_not_flag_operator") (pc) (label_ref (match_operand 0))))] "" "jc\t%l0" ) (define_insn "*b__rev" [(set (pc) (if_then_else (any_cond_z (reg:USE_Z CC_REG) (const_int 0)) (pc) (label_ref (match_operand 0))))] "" "\t%l0" ) (define_insn "*b__rev" [(set (pc) (if_then_else (any_cond_cz (reg:USE_CZ CC_REG) (const_int 0)) (pc) (label_ref (match_operand 0))))] "" "\t%l0" ) (define_insn "*b__rev" [(set (pc) (if_then_else (any_cond_so (reg:USE_SO CC_REG) (const_int 0)) (pc) (label_ref (match_operand 0))))] "" "\t%l0" ) (define_insn "*b__rev" [(set (pc) (if_then_else (any_cond_soz (reg:USE_SOZ CC_REG) (const_int 0)) (pc) (label_ref (match_operand 0))))] "" "\t%l0" ) ; Branches using only the sign flag. (define_insn "*blt__rev" [(set (pc) (if_then_else (lt (reg:USE_S CC_REG) (const_int 0)) (pc) (label_ref (match_operand 0))))] "" "jns\t%l0" ) (define_insn "*bge__rev" [(set (pc) (if_then_else (ge (reg:USE_S CC_REG) (const_int 0)) (pc) (label_ref (match_operand 0))))] "" "js\t%l0" ) ; FIXME This instruction can only jump [-128;127] bytes. GAS doesn't work ; around it. 
(define_insn "*jump_if_cx_zero" [(set (pc) (if_then_else (eq (match_operand:HI 0 "single_register_operand" "c") (const_int 0)) (label_ref (match_operand 1)) (pc)))] "TARGET_SHORT_JUMPS" "jcxz\t%l1" ) (define_insn_and_split "*load_cx_jump_if_zero" [(set (pc) (if_then_else (eq (match_operand:HI 2 "general_operand" "g") (const_int 0)) (label_ref (match_operand 1)) (pc))) (set (match_operand:HI 0 "single_register_operand" "=c") (match_dup 2))] "TARGET_SHORT_JUMPS" "#" "reload_completed" [(set (match_dup 0) (match_dup 2)) (set (pc) (if_then_else (eq (match_dup 0) (const_int 0)) (label_ref (match_dup 1)) (pc)))] ) (define_insn "jump" [(set (pc) (label_ref (match_operand 0 "")))] "" "jmp\t%l0" ) ; This is made needlessly complicated by a combination of inconsistent use of ; (mem x) expressions and an inconsistent assembler syntax. Basicly, there is ; an extra (mem x) around the function address. (define_expand "call" [(call (match_operand:PQI 0 "") (match_operand:HI 1 "general_operand"))] "" ) (define_insn "*call" [(call (mem:PQI (match_operand:HI 0 "general_operand" "g")) (match_operand:HI 1 "general_operand" "g"))] "" { if (MEM_P (operands[0])) return ("call\t*%0"); else if (CONSTANT_P (operands[0])) return ("call\t%c0"); else return ("call\t*%0"); }) (define_expand "call_value" [(set (match_operand 0 "register_operand") (call (match_operand:PQI 1 "") (match_operand:HI 2 "general_operand")))] "" ) (define_insn "*call_value" [(set (match_operand 0 "register_operand" "=r") (call (mem:PQI (match_operand:HI 1 "general_operand" "g")) (match_operand:HI 2 "general_operand" "g")))] "" { if (MEM_P (operands[1])) return ("call\t*%1"); else if (CONSTANT_P (operands[1])) return ("call\t%c1"); else return ("call\t*%1"); }) (define_insn "nop" [(const_int 0)] "" "nop" ) (define_insn "indirect_jump" [(set (pc) (match_operand:HI 0 "nonimmediate_operand" "rm"))] "" "jmp\t*%0" ) ; Taken almost verbatim from the GCC Internals manual. ; FIXME Manual says "general_operand" which is invalid. 
; FIXME This instruction can only jump [-128;127] bytes.  GAS doesn't work
; around it.
; Note: the label is printed with %l (as in the jcxz pattern), since
; operand 1 is a label_ref.
(define_insn "decrement_and_branch_until_zero"
  [(set (pc)
	(if_then_else
	  (ge (plus:HI (match_operand:HI 0 "nonimmediate_operand" "+c*rm")
		       (const_int -1))
	      (const_int 0))
	  (label_ref (match_operand 1 ""))
	  (pc)))
   (set (match_dup 0)
	(plus:HI (match_dup 0) (const_int -1)))]
  "TARGET_SHORT_JUMPS"
  "loop\t%l1"
)

; Keep ia16_initial_frame_pointer_offset() in sync with this pattern.
; Keep ia16_first_parm_offset() in sync with this pattern.
(define_expand "prologue"
  [(const_int 1)]
  ""
{
  rtx insn;
  unsigned int i;
  HOST_WIDE_INT size = get_frame_size ()
		       + current_function_outgoing_args_size
		       + current_function_pretend_args_size;

  /* Save used registers which are not call clobbered.  */
  for (i = 0; i < BP_REG; i ++)
    {
      if (ia16_save_reg_p (i))
	{
	  insn = emit_insn (ia16_push_reg (i));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
  if (ia16_save_reg_p (ES_REG))
    {
      insn = emit_insn (ia16_push_reg (ES_REG));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If possible, set up the whole frame with one "enterw".  The +2
     accounts for the %bp push that "enterw" performs itself.  */
  if (HAVE__enter && frame_pointer_needed && ! call_used_regs[BP_REG] && size)
    {
      insn = gen__enter (gen_rtx_CONST_INT (HImode, size + 2));
      insn = emit_insn (insn);
      RTX_FRAME_RELATED_P (insn) = 1;
      DONE;
    }

  if (! frame_pointer_needed && ia16_save_reg_p (BP_REG))
    {
      insn = emit_insn (ia16_push_reg (BP_REG));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (frame_pointer_needed && ! call_used_regs[BP_REG])
    {
      insn = emit_insn (ia16_push_reg (BP_REG));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, BP_REG),
			     gen_rtx_REG (Pmode, SP_REG));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (size)
    {
      insn = emit_insn (gen_subhi3 (gen_rtx_REG (Pmode, SP_REG),
				    gen_rtx_REG (Pmode, SP_REG),
				    gen_rtx_CONST_INT (HImode, size)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  DONE;
})

; "enterw N,$0": push %bp, set up %bp, and reserve N-2 further bytes
; (operand 0 includes the pushed %bp, hence the "-2" in the template).
(define_insn "_enter"
  [(set (mem:HI (plus:HI (reg:HI SP_REG) (const_int -2)))
	(reg:HI BP_REG))
   (set (reg:HI BP_REG) (reg:HI SP_REG))
   (set (reg:HI SP_REG)
	(minus:HI (reg:HI SP_REG)
		  (match_operand:HI 0 "immediate_operand" "i")))]
  "TARGET_ENTER_LEAVE"
  "enterw\t%0-2,\t$0"
)

(define_expand "epilogue"
  [(return)]
  ""
{
  unsigned int i;
  HOST_WIDE_INT size = get_frame_size ()
		       + current_function_outgoing_args_size
		       + current_function_pretend_args_size;

  /* We need to restore the stack pointer.  We have two options:
     1) Add the frame size to sp.
     2) Use the saved sp value in bp.
     The second option makes it possible to use the leave instruction and
     is shorter in any case, but ties up a register.  So only refer to bp
     if it wasn't used during the function.  */
  if (HAVE__leave && frame_pointer_needed && ! call_used_regs [BP_REG])
    {
      /* If sp wasn't modified, "popw %bp" is faster than "leavew".  */
      if (size == 0 && current_function_sp_is_unchanging)
	emit_insn (ia16_pop_reg (BP_REG));
      else
	emit_insn (gen__leave ());
    }
  else if (frame_pointer_needed)
    {
      if (size)
	{
	  emit_move_insn (gen_rtx_REG (Pmode, SP_REG),
			  gen_rtx_REG (Pmode, BP_REG));
	}
      if (! call_used_regs [BP_REG])
	{
	  emit_insn (ia16_pop_reg (BP_REG));
	}
    }
  else /* ! frame_pointer_needed */
    {
      if (size)
	{
	  emit_insn (gen_addhi3 (gen_rtx_REG (Pmode, SP_REG),
				 gen_rtx_REG (Pmode, SP_REG),
				 gen_rtx_CONST_INT (HImode, size)));
	}
      if (ia16_save_reg_p (BP_REG))
	{
	  emit_insn (ia16_pop_reg (BP_REG));
	}
    }

  /* Restore used registers in reverse order of the prologue's pushes.
     The loop counts i down from DI_REG to 0 and then terminates by
     unsigned wraparound: after i-- at 0, i becomes UINT_MAX, which
     fails the "< FIRST_PSEUDO_REGISTER" test.  */
  if (ia16_save_reg_p (ES_REG))
    emit_insn (ia16_pop_reg (ES_REG));
  for (i = DI_REG; i < FIRST_PSEUDO_REGISTER; i --)
    {
      if (ia16_save_reg_p (i))
	{
	  emit_insn (ia16_pop_reg (i));
	}
    }
})

; "leavew": %sp = %bp + 2 and %bp reloaded from the stack.
(define_insn "_leave"
  [(set (reg:HI SP_REG) (plus:HI (reg:HI BP_REG) (const_int 2)))
   (set (reg:HI BP_REG) (mem:HI (reg:HI BP_REG)))]
  "TARGET_ENTER_LEAVE"
  "leavew"
)

(define_insn "*return"
  [(return)]
  "reload_completed"
  "ret"
)

; Atomic exchange; the xchg instruction is implicitly locked on real
; hardware, the explicit "lock" prefix makes the intent clear.
; NOTE(review): the <mode> suffix and "=<r>" constraint were reconstructed
; after angle-bracket stripping ("sync_lock_test_and_set" needs a mode
; suffix to be a valid standard pattern name); verify against history.
(define_insn "sync_lock_test_and_set<mode>"
  [(set (match_operand:MO 0 "register_operand" "=<r>")
	(match_operand:MO 1 "memory_operand" "+m"))
   (set (match_dup 1)
	(match_operand:MO 2 "nonmemory_operand" "0"))]
  ""
  "lock\;xchg\t%0,\t%1"
)