From 554fd8c5195424bdbcabf5de30fdc183aba391bd Mon Sep 17 00:00:00 2001
From: upstream source tree
Date: Sun, 15 Mar 2015 20:14:05 -0400
Subject: obtained gcc-4.6.4.tar.bz2 from upstream website; verified
 gcc-4.6.4.tar.bz2.sig; imported gcc-4.6.4 source tree from verified upstream
 tarball.

downloading a git-generated archive based on the 'upstream' tag should
provide you with a source tree that is binary identical to the one
extracted from the above tarball.

if you have obtained the source via the command 'git clone', however,
do note that line-endings of files in your working directory might
differ from line-endings of the respective files in the upstream
repository.
---
 gcc/config/arm/thumb2.md | 1121 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 1121 insertions(+)
 create mode 100644 gcc/config/arm/thumb2.md

diff --git a/gcc/config/arm/thumb2.md b/gcc/config/arm/thumb2.md
new file mode 100644
index 000000000..1b2fb2d44
--- /dev/null
+++ b/gcc/config/arm/thumb2.md
@@ -0,0 +1,1121 @@
+;; ARM Thumb-2 Machine Description
+;; Copyright (C) 2007, 2008, 2010 Free Software Foundation, Inc.
+;; Written by CodeSourcery, LLC.
+;;
+;; This file is part of GCC.
+;;
+;; GCC is free software; you can redistribute it and/or modify it
+;; under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 3, or (at your option)
+;; any later version.
+;;
+;; GCC is distributed in the hope that it will be useful, but
+;; WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+;; General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING3.  If not see
+;; <http://www.gnu.org/licenses/>.  */
+
+;; Note: Thumb-2 is the variant of the Thumb architecture that adds
+;; 32-bit encodings of [almost all of] the ARM instruction set.
+;; Some old documents refer to the relatively minor interworking
+;; changes made in armv5t as "thumb2".  These are considered part of
+;; the 16-bit Thumb-1 instruction set.
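+
+;; A quick orientation (an illustrative sketch, not upstream text):
+;; Thumb-2 has no per-instruction condition field, so the conditional
+;; templates below first emit an IT (If-Then) instruction.  "it <cond>"
+;; predicates the next instruction on <cond>; "ite <cond>" predicates
+;; the next instruction on <cond> and the one after it on the inverse
+;; condition.  Schematically, for
+;;
+;;   int f (int a, int b) { return a == b ? 1 : 0; }
+;;
+;; a Thumb-2 compiler may emit:
+;;
+;;   cmp   r0, r1
+;;   ite   eq          @ next insn if EQ, the one after if NE
+;;   moveq r0, #1
+;;   movne r0, #0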
+
+(define_insn "*thumb2_incscc"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+        (plus:SI (match_operator:SI 2 "arm_comparison_operator"
+                  [(match_operand:CC 3 "cc_register" "") (const_int 0)])
+                 (match_operand:SI 1 "s_register_operand" "0,?r")))]
+  "TARGET_THUMB2"
+  "@
+   it\\t%d2\;add%d2\\t%0, %1, #1
+   ite\\t%D2\;mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
+  [(set_attr "conds" "use")
+   (set_attr "length" "6,10")]
+)
+
+(define_insn "*thumb2_decscc"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+        (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
+                  (match_operator:SI 2 "arm_comparison_operator"
+                   [(match_operand 3 "cc_register" "") (const_int 0)])))]
+  "TARGET_THUMB2"
+  "@
+   it\\t%d2\;sub%d2\\t%0, %1, #1
+   ite\\t%D2\;mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
+  [(set_attr "conds" "use")
+   (set_attr "length" "6,10")]
+)
+
+;; Thumb-2 only allows shift by constant on data processing instructions
+(define_insn "*thumb_andsi_not_shiftsi_si"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+        (and:SI (not:SI (match_operator:SI 4 "shift_operator"
+                         [(match_operand:SI 2 "s_register_operand" "r")
+                          (match_operand:SI 3 "const_int_operand" "M")]))
+                (match_operand:SI 1 "s_register_operand" "r")))]
+  "TARGET_THUMB2"
+  "bic%?\\t%0, %1, %2%S4"
+  [(set_attr "predicable" "yes")
+   (set_attr "shift" "2")
+   (set_attr "type" "alu_shift")]
+)
+
+(define_insn "*thumb2_smaxsi3"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+        (smax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
+                 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
+   (clobber (reg:CC CC_REGNUM))]
+  "TARGET_THUMB2"
+  "@
+   cmp\\t%1, %2\;it\\tlt\;movlt\\t%0, %2
+   cmp\\t%1, %2\;it\\tge\;movge\\t%0, %1
+   cmp\\t%1, %2\;ite\\tge\;movge\\t%0, %1\;movlt\\t%0, %2"
+  [(set_attr "conds" "clob")
+   (set_attr "length" "10,10,14")]
+)
+
+(define_insn "*thumb2_sminsi3"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+        (smin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
+                 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
+   (clobber (reg:CC CC_REGNUM))]
+  "TARGET_THUMB2"
+  "@
+   cmp\\t%1, %2\;it\\tge\;movge\\t%0, %2
+   cmp\\t%1, %2\;it\\tlt\;movlt\\t%0, %1
+   cmp\\t%1, %2\;ite\\tlt\;movlt\\t%0, %1\;movge\\t%0, %2"
+  [(set_attr "conds" "clob")
+   (set_attr "length" "10,10,14")]
+)
+
+(define_insn "*thumb32_umaxsi3"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+        (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
+                 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
+   (clobber (reg:CC CC_REGNUM))]
+  "TARGET_THUMB2"
+  "@
+   cmp\\t%1, %2\;it\\tcc\;movcc\\t%0, %2
+   cmp\\t%1, %2\;it\\tcs\;movcs\\t%0, %1
+   cmp\\t%1, %2\;ite\\tcs\;movcs\\t%0, %1\;movcc\\t%0, %2"
+  [(set_attr "conds" "clob")
+   (set_attr "length" "10,10,14")]
+)
+
+(define_insn "*thumb2_uminsi3"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+        (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
+                 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
+   (clobber (reg:CC CC_REGNUM))]
+  "TARGET_THUMB2"
+  "@
+   cmp\\t%1, %2\;it\\tcs\;movcs\\t%0, %2
+   cmp\\t%1, %2\;it\\tcc\;movcc\\t%0, %1
+   cmp\\t%1, %2\;ite\\tcc\;movcc\\t%0, %1\;movcs\\t%0, %2"
+  [(set_attr "conds" "clob")
+   (set_attr "length" "10,10,14")]
+)
+
+;; Thumb-2 does not have rsc, so use a clever trick with shifter operands.
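+;; (The following worked example is an illustrative sketch, not
+;; upstream text.)  Negating the 64-bit value {hi,lo} needs
+;; lo' = 0 - lo and hi' = -hi - borrow.  ARM mode finishes with "rsc",
+;; which Thumb-2 lacks; the pattern below uses a shifter operand
+;; instead:
+;;
+;;   negs  lo', lo              @ lo' = 0 - lo, carry = !borrow
+;;   sbc   hi', hi, hi, lsl #1  @ hi - 2*hi - !carry  ==  -hi - borrow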
+(define_insn "*thumb2_negdi2" + [(set (match_operand:DI 0 "s_register_operand" "=&r,r") + (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "negs\\t%Q0, %Q1\;sbc\\t%R0, %R1, %R1, lsl #1" + [(set_attr "conds" "clob") + (set_attr "length" "8")] +) + +(define_insn "*thumb2_abssi2" + [(set (match_operand:SI 0 "s_register_operand" "=r,&r") + (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "@ + cmp\\t%0, #0\;it\tlt\;rsblt\\t%0, %0, #0 + eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31" + [(set_attr "conds" "clob,*") + (set_attr "shift" "1") + ;; predicable can't be set based on the variant, so left as no + (set_attr "length" "10,8")] +) + +(define_insn "*thumb2_neg_abssi2" + [(set (match_operand:SI 0 "s_register_operand" "=r,&r") + (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "@ + cmp\\t%0, #0\;it\\tgt\;rsbgt\\t%0, %0, #0 + eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31" + [(set_attr "conds" "clob,*") + (set_attr "shift" "1") + ;; predicable can't be set based on the variant, so left as no + (set_attr "length" "10,8")] +) + +;; We have two alternatives here for memory loads (and similarly for stores) +;; to reflect the fact that the permissible constant pool ranges differ +;; between ldr instructions taking low regs and ldr instructions taking high +;; regs. The high register alternatives are not taken into account when +;; choosing register preferences in order to reflect their expense. +(define_insn "*thumb2_movsi_insn" + [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,l ,*hk,m,*m") + (match_operand:SI 1 "general_operand" "rk ,I,K,j,mi,*mi,l,*hk"))] + "TARGET_THUMB2 && ! TARGET_IWMMXT + && !(TARGET_HARD_FLOAT && TARGET_VFP) + && ( register_operand (operands[0], SImode) + || register_operand (operands[1], SImode))" + "@ + mov%?\\t%0, %1 + mov%?\\t%0, %1 + mvn%?\\t%0, #%B1 + movw%?\\t%0, %1 + ldr%?\\t%0, %1 + ldr%?\\t%0, %1 + str%?\\t%1, %0 + str%?\\t%1, %0" + [(set_attr "type" "*,*,*,*,load1,load1,store1,store1") + (set_attr "predicable" "yes") + (set_attr "pool_range" "*,*,*,*,1020,4096,*,*") + (set_attr "neg_pool_range" "*,*,*,*,0,0,*,*")] +) + +(define_insn "tls_load_dot_plus_four" + [(set (match_operand:SI 0 "register_operand" "=l,l,r,r") + (mem:SI (unspec:SI [(match_operand:SI 2 "register_operand" "0,1,0,1") + (const_int 4) + (match_operand 3 "" "")] + UNSPEC_PIC_BASE))) + (clobber (match_scratch:SI 1 "=X,l,X,r"))] + "TARGET_THUMB2" + "* + (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\", + INTVAL (operands[3])); + return \"add\\t%2, %|pc\;ldr%?\\t%0, [%2]\"; + " + [(set_attr "length" "4,4,6,6")] +) + +;; Thumb-2 always has load/store halfword instructions, so we can avoid a lot +;; of the messiness associated with the ARM patterns. 
+(define_insn "*thumb2_movhi_insn" + [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r") + (match_operand:HI 1 "general_operand" "rI,n,r,m"))] + "TARGET_THUMB2" + "@ + mov%?\\t%0, %1\\t%@ movhi + movw%?\\t%0, %L1\\t%@ movhi + str%(h%)\\t%1, %0\\t%@ movhi + ldr%(h%)\\t%0, %1\\t%@ movhi" + [(set_attr "type" "*,*,store1,load1") + (set_attr "predicable" "yes") + (set_attr "pool_range" "*,*,*,4096") + (set_attr "neg_pool_range" "*,*,*,250")] +) + +(define_insn "*thumb2_cmpsi_neg_shiftsi" + [(set (reg:CC CC_REGNUM) + (compare:CC (match_operand:SI 0 "s_register_operand" "r") + (neg:SI (match_operator:SI 3 "shift_operator" + [(match_operand:SI 1 "s_register_operand" "r") + (match_operand:SI 2 "const_int_operand" "M")]))))] + "TARGET_THUMB2" + "cmn%?\\t%0, %1%S3" + [(set_attr "conds" "set") + (set_attr "shift" "1") + (set_attr "type" "alu_shift")] +) + +(define_insn "*thumb2_mov_scc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (match_operator:SI 1 "arm_comparison_operator" + [(match_operand 2 "cc_register" "") (const_int 0)]))] + "TARGET_THUMB2" + "ite\\t%D1\;mov%D1\\t%0, #0\;mov%d1\\t%0, #1" + [(set_attr "conds" "use") + (set_attr "length" "10")] +) + +(define_insn "*thumb2_mov_negscc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (neg:SI (match_operator:SI 1 "arm_comparison_operator" + [(match_operand 2 "cc_register" "") (const_int 0)])))] + "TARGET_THUMB2" + "ite\\t%D1\;mov%D1\\t%0, #0\;mvn%d1\\t%0, #0" + [(set_attr "conds" "use") + (set_attr "length" "10")] +) + +(define_insn "*thumb2_mov_notscc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (not:SI (match_operator:SI 1 "arm_comparison_operator" + [(match_operand 2 "cc_register" "") (const_int 0)])))] + "TARGET_THUMB2" + "ite\\t%D1\;mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1" + [(set_attr "conds" "use") + (set_attr "length" "10")] +) + +(define_insn "*thumb2_movsicc_insn" + [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r") + (if_then_else:SI + (match_operator 3 "arm_comparison_operator" + [(match_operand 4 "cc_register" "") (const_int 0)]) + (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K") + (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))] + "TARGET_THUMB2" + "@ + it\\t%D3\;mov%D3\\t%0, %2 + it\\t%D3\;mvn%D3\\t%0, #%B2 + it\\t%d3\;mov%d3\\t%0, %1 + it\\t%d3\;mvn%d3\\t%0, #%B1 + ite\\t%d3\;mov%d3\\t%0, %1\;mov%D3\\t%0, %2 + ite\\t%d3\;mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2 + ite\\t%d3\;mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2 + ite\\t%d3\;mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2" + [(set_attr "length" "6,6,6,6,10,10,10,10") + (set_attr "conds" "use")] +) + +(define_insn "*thumb2_movsfcc_soft_insn" + [(set (match_operand:SF 0 "s_register_operand" "=r,r") + (if_then_else:SF (match_operator 3 "arm_comparison_operator" + [(match_operand 4 "cc_register" "") (const_int 0)]) + (match_operand:SF 1 "s_register_operand" "0,r") + (match_operand:SF 2 "s_register_operand" "r,0")))] + "TARGET_THUMB2 && TARGET_SOFT_FLOAT" + "@ + it\\t%D3\;mov%D3\\t%0, %2 + it\\t%d3\;mov%d3\\t%0, %1" + [(set_attr "length" "6,6") + (set_attr "conds" "use")] +) + +(define_insn "*call_reg_thumb2" + [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r")) + (match_operand 1 "" "")) + (use (match_operand 2 "" "")) + (clobber (reg:SI LR_REGNUM))] + "TARGET_THUMB2" + "blx%?\\t%0" + [(set_attr "type" "call")] +) + +(define_insn "*call_value_reg_thumb2" + [(set (match_operand 0 "" "") + (call (mem:SI (match_operand:SI 1 "register_operand" "l*r")) + (match_operand 2 "" ""))) + (use (match_operand 3 "" "")) + (clobber 
(reg:SI LR_REGNUM))] + "TARGET_THUMB2" + "blx\\t%1" + [(set_attr "type" "call")] +) + +(define_insn "*thumb2_indirect_jump" + [(set (pc) + (match_operand:SI 0 "register_operand" "l*r"))] + "TARGET_THUMB2" + "bx\\t%0" + [(set_attr "conds" "clob")] +) +;; Don't define thumb2_load_indirect_jump because we can't guarantee label +;; addresses will have the thumb bit set correctly. + + +(define_insn "*thumb2_and_scc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (and:SI (match_operator:SI 1 "arm_comparison_operator" + [(match_operand 3 "cc_register" "") (const_int 0)]) + (match_operand:SI 2 "s_register_operand" "r")))] + "TARGET_THUMB2" + "ite\\t%D1\;mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1" + [(set_attr "conds" "use") + (set_attr "length" "10")] +) + +(define_insn "*thumb2_ior_scc" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + (ior:SI (match_operator:SI 2 "arm_comparison_operator" + [(match_operand 3 "cc_register" "") (const_int 0)]) + (match_operand:SI 1 "s_register_operand" "0,?r")))] + "TARGET_THUMB2" + "@ + it\\t%d2\;orr%d2\\t%0, %1, #1 + ite\\t%D2\;mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1" + [(set_attr "conds" "use") + (set_attr "length" "6,10")] +) + +(define_insn "*thumb2_cond_move" + [(set (match_operand:SI 0 "s_register_operand" "=r,r,r") + (if_then_else:SI (match_operator 3 "equality_operator" + [(match_operator 4 "arm_comparison_operator" + [(match_operand 5 "cc_register" "") (const_int 0)]) + (const_int 0)]) + (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI") + (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))] + "TARGET_THUMB2" + "* + if (GET_CODE (operands[3]) == NE) + { + if (which_alternative != 1) + output_asm_insn (\"it\\t%D4\;mov%D4\\t%0, %2\", operands); + if (which_alternative != 0) + output_asm_insn (\"it\\t%d4\;mov%d4\\t%0, %1\", operands); + return \"\"; + } + switch (which_alternative) + { + case 0: + output_asm_insn (\"it\\t%d4\", operands); + break; + case 1: + output_asm_insn (\"it\\t%D4\", operands); + break; + case 2: + output_asm_insn (\"ite\\t%D4\", operands); + break; + default: + abort(); + } + if (which_alternative != 0) + output_asm_insn (\"mov%D4\\t%0, %1\", operands); + if (which_alternative != 1) + output_asm_insn (\"mov%d4\\t%0, %2\", operands); + return \"\"; + " + [(set_attr "conds" "use") + (set_attr "length" "6,6,10")] +) + +(define_insn "*thumb2_cond_arith" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + (match_operator:SI 5 "shiftable_operator" + [(match_operator:SI 4 "arm_comparison_operator" + [(match_operand:SI 2 "s_register_operand" "r,r") + (match_operand:SI 3 "arm_rhs_operand" "rI,rI")]) + (match_operand:SI 1 "s_register_operand" "0,?r")])) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "* + if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx) + return \"%i5\\t%0, %1, %2, lsr #31\"; + + output_asm_insn (\"cmp\\t%2, %3\", operands); + if (GET_CODE (operands[5]) == AND) + { + output_asm_insn (\"ite\\t%D4\", operands); + output_asm_insn (\"mov%D4\\t%0, #0\", operands); + } + else if (GET_CODE (operands[5]) == MINUS) + { + output_asm_insn (\"ite\\t%D4\", operands); + output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands); + } + else if (which_alternative != 0) + { + output_asm_insn (\"ite\\t%D4\", operands); + output_asm_insn (\"mov%D4\\t%0, %1\", operands); + } + else + output_asm_insn (\"it\\t%d4\", operands); + return \"%i5%d4\\t%0, %1, #1\"; + " + [(set_attr "conds" "clob") + (set_attr "length" "14")] +) + +(define_insn "*thumb2_cond_sub" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + 
(minus:SI (match_operand:SI 1 "s_register_operand" "0,?r") + (match_operator:SI 4 "arm_comparison_operator" + [(match_operand:SI 2 "s_register_operand" "r,r") + (match_operand:SI 3 "arm_rhs_operand" "rI,rI")]))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "* + output_asm_insn (\"cmp\\t%2, %3\", operands); + if (which_alternative != 0) + { + output_asm_insn (\"ite\\t%D4\", operands); + output_asm_insn (\"mov%D4\\t%0, %1\", operands); + } + else + output_asm_insn (\"it\\t%d4\", operands); + return \"sub%d4\\t%0, %1, #1\"; + " + [(set_attr "conds" "clob") + (set_attr "length" "10,14")] +) + +(define_insn "*thumb2_negscc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (neg:SI (match_operator 3 "arm_comparison_operator" + [(match_operand:SI 1 "s_register_operand" "r") + (match_operand:SI 2 "arm_rhs_operand" "rI")]))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "* + if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx) + return \"asr\\t%0, %1, #31\"; + + if (GET_CODE (operands[3]) == NE) + return \"subs\\t%0, %1, %2\;it\\tne\;mvnne\\t%0, #0\"; + + output_asm_insn (\"cmp\\t%1, %2\", operands); + output_asm_insn (\"ite\\t%D3\", operands); + output_asm_insn (\"mov%D3\\t%0, #0\", operands); + return \"mvn%d3\\t%0, #0\"; + " + [(set_attr "conds" "clob") + (set_attr "length" "14")] +) + +(define_insn "*thumb2_movcond" + [(set (match_operand:SI 0 "s_register_operand" "=r,r,r") + (if_then_else:SI + (match_operator 5 "arm_comparison_operator" + [(match_operand:SI 3 "s_register_operand" "r,r,r") + (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")]) + (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI") + (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "* + if (GET_CODE (operands[5]) == LT + && (operands[4] == const0_rtx)) + { + if (which_alternative != 1 && GET_CODE (operands[1]) == REG) + { + if (operands[2] == const0_rtx) + return \"and\\t%0, %1, %3, asr #31\"; + return \"ands\\t%0, %1, %3, asr #32\;it\\tcc\;movcc\\t%0, %2\"; + } + else if (which_alternative != 0 && GET_CODE (operands[2]) == REG) + { + if (operands[1] == const0_rtx) + return \"bic\\t%0, %2, %3, asr #31\"; + return \"bics\\t%0, %2, %3, asr #32\;it\\tcs\;movcs\\t%0, %1\"; + } + /* The only case that falls through to here is when both ops 1 & 2 + are constants. */ + } + + if (GET_CODE (operands[5]) == GE + && (operands[4] == const0_rtx)) + { + if (which_alternative != 1 && GET_CODE (operands[1]) == REG) + { + if (operands[2] == const0_rtx) + return \"bic\\t%0, %1, %3, asr #31\"; + return \"bics\\t%0, %1, %3, asr #32\;it\\tcs\;movcs\\t%0, %2\"; + } + else if (which_alternative != 0 && GET_CODE (operands[2]) == REG) + { + if (operands[1] == const0_rtx) + return \"and\\t%0, %2, %3, asr #31\"; + return \"ands\\t%0, %2, %3, asr #32\;it\tcc\;movcc\\t%0, %1\"; + } + /* The only case that falls through to here is when both ops 1 & 2 + are constants. 
*/ + } + if (GET_CODE (operands[4]) == CONST_INT + && !const_ok_for_arm (INTVAL (operands[4]))) + output_asm_insn (\"cmn\\t%3, #%n4\", operands); + else + output_asm_insn (\"cmp\\t%3, %4\", operands); + switch (which_alternative) + { + case 0: + output_asm_insn (\"it\\t%D5\", operands); + break; + case 1: + output_asm_insn (\"it\\t%d5\", operands); + break; + case 2: + output_asm_insn (\"ite\\t%d5\", operands); + break; + default: + abort(); + } + if (which_alternative != 0) + output_asm_insn (\"mov%d5\\t%0, %1\", operands); + if (which_alternative != 1) + output_asm_insn (\"mov%D5\\t%0, %2\", operands); + return \"\"; + " + [(set_attr "conds" "clob") + (set_attr "length" "10,10,14")] +) + +;; Zero and sign extension instructions. + +;; All supported Thumb2 implementations are armv6, so only that case is +;; provided. +(define_insn "*thumb2_extendqisi_v6" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))] + "TARGET_THUMB2 && arm_arch6" + "@ + sxtb%?\\t%0, %1 + ldr%(sb%)\\t%0, %1" + [(set_attr "type" "alu_shift,load_byte") + (set_attr "predicable" "yes") + (set_attr "pool_range" "*,4096") + (set_attr "neg_pool_range" "*,250")] +) + +(define_insn "*thumb2_zero_extendhisi2_v6" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))] + "TARGET_THUMB2 && arm_arch6" + "@ + uxth%?\\t%0, %1 + ldr%(h%)\\t%0, %1" + [(set_attr "type" "alu_shift,load_byte") + (set_attr "predicable" "yes") + (set_attr "pool_range" "*,4096") + (set_attr "neg_pool_range" "*,250")] +) + +(define_insn "thumb2_zero_extendqisi2_v6" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))] + "TARGET_THUMB2 && arm_arch6" + "@ + uxtb%(%)\\t%0, %1 + ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2" + [(set_attr "type" "alu_shift,load_byte") + (set_attr "predicable" "yes") + (set_attr "pool_range" "*,4096") + (set_attr "neg_pool_range" "*,250")] +) + +(define_insn "thumb2_casesi_internal" + [(parallel [(set (pc) + (if_then_else + (leu (match_operand:SI 0 "s_register_operand" "r") + (match_operand:SI 1 "arm_rhs_operand" "rI")) + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4)) + (label_ref (match_operand 2 "" "")))) + (label_ref (match_operand 3 "" "")))) + (clobber (reg:CC CC_REGNUM)) + (clobber (match_scratch:SI 4 "=&r")) + (use (label_ref (match_dup 2)))])] + "TARGET_THUMB2 && !flag_pic" + "* return thumb2_output_casesi(operands);" + [(set_attr "conds" "clob") + (set_attr "length" "16")] +) + +(define_insn "thumb2_casesi_internal_pic" + [(parallel [(set (pc) + (if_then_else + (leu (match_operand:SI 0 "s_register_operand" "r") + (match_operand:SI 1 "arm_rhs_operand" "rI")) + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4)) + (label_ref (match_operand 2 "" "")))) + (label_ref (match_operand 3 "" "")))) + (clobber (reg:CC CC_REGNUM)) + (clobber (match_scratch:SI 4 "=&r")) + (clobber (match_scratch:SI 5 "=r")) + (use (label_ref (match_dup 2)))])] + "TARGET_THUMB2 && flag_pic" + "* return thumb2_output_casesi(operands);" + [(set_attr "conds" "clob") + (set_attr "length" "20")] +) + +;; Note: this is not predicable, to avoid issues with linker-generated +;; interworking stubs. 
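+;; (An illustrative aside, not upstream text:) the return emitted by
+;; output_return_instruction below is typically either
+;;
+;;   bx    lr                 @ nothing to restore
+;;   pop   {r4, r5, pc}       @ restore call-saved regs and return
+;;
+;; Keeping the pattern non-predicable guarantees it never lands inside
+;; an IT block, presumably so that any stub the linker interposes for
+;; ARM/Thumb interworking is always reached unconditionally.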
+(define_insn "*thumb2_return" + [(return)] + "TARGET_THUMB2 && USE_RETURN_INSN (FALSE)" + "* + { + return output_return_instruction (const_true_rtx, TRUE, FALSE); + }" + [(set_attr "type" "load1") + (set_attr "length" "12")] +) + +(define_insn_and_split "thumb2_eh_return" + [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")] + VUNSPEC_EH_RETURN) + (clobber (match_scratch:SI 1 "=&r"))] + "TARGET_THUMB2" + "#" + "&& reload_completed" + [(const_int 0)] + " + { + thumb_set_return_address (operands[0], operands[1]); + DONE; + }" +) + +(define_insn "*thumb2_alusi3_short" + [(set (match_operand:SI 0 "s_register_operand" "=l") + (match_operator:SI 3 "thumb_16bit_operator" + [(match_operand:SI 1 "s_register_operand" "0") + (match_operand:SI 2 "s_register_operand" "l")])) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && reload_completed + && GET_CODE(operands[3]) != PLUS + && GET_CODE(operands[3]) != MINUS" + "%I3%!\\t%0, %1, %2" + [(set_attr "predicable" "yes") + (set_attr "length" "2")] +) + +;; Similarly for 16-bit shift instructions +;; There is no 16-bit rotate by immediate instruction. +(define_peephole2 + [(set (match_operand:SI 0 "low_register_operand" "") + (match_operator:SI 3 "shift_operator" + [(match_operand:SI 1 "low_register_operand" "") + (match_operand:SI 2 "low_reg_or_int_operand" "")]))] + "TARGET_THUMB2 + && peep2_regno_dead_p(0, CC_REGNUM) + && (CONST_INT_P (operands[2]) || operands[1] == operands[0]) + && ((GET_CODE(operands[3]) != ROTATE && GET_CODE(operands[3]) != ROTATERT) + || REG_P(operands[2]))" + [(parallel + [(set (match_dup 0) + (match_op_dup 3 + [(match_dup 1) + (match_dup 2)])) + (clobber (reg:CC CC_REGNUM))])] + "" +) + +(define_insn "*thumb2_shiftsi3_short" + [(set (match_operand:SI 0 "low_register_operand" "=l,l") + (match_operator:SI 3 "shift_operator" + [(match_operand:SI 1 "low_register_operand" "0,l") + (match_operand:SI 2 "low_reg_or_int_operand" "l,M")])) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && reload_completed + && ((GET_CODE(operands[3]) != ROTATE && GET_CODE(operands[3]) != ROTATERT) + || REG_P(operands[2]))" + "* return arm_output_shift(operands, 2);" + [(set_attr "predicable" "yes") + (set_attr "shift" "1") + (set_attr "length" "2") + (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "") + (const_string "alu_shift") + (const_string "alu_shift_reg")))] +) + +;; 16-bit load immediate +(define_peephole2 + [(set (match_operand:QHSI 0 "low_register_operand" "") + (match_operand:QHSI 1 "const_int_operand" ""))] + "TARGET_THUMB2 + && peep2_regno_dead_p(0, CC_REGNUM) + && (unsigned HOST_WIDE_INT) INTVAL(operands[1]) < 256" + [(parallel + [(set (match_dup 0) + (match_dup 1)) + (clobber (reg:CC CC_REGNUM))])] + "" +) + +(define_insn "*thumb2_mov_shortim" + [(set (match_operand:QHSI 0 "low_register_operand" "=l") + (match_operand:QHSI 1 "const_int_operand" "I")) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && reload_completed" + "mov%!\t%0, %1" + [(set_attr "predicable" "yes") + (set_attr "length" "2")] +) + +;; 16-bit add/sub immediate +(define_peephole2 + [(set (match_operand:SI 0 "low_register_operand" "") + (plus:SI (match_operand:SI 1 "low_register_operand" "") + (match_operand:SI 2 "const_int_operand" "")))] + "TARGET_THUMB2 + && peep2_regno_dead_p(0, CC_REGNUM) + && ((rtx_equal_p(operands[0], operands[1]) + && INTVAL(operands[2]) > -256 && INTVAL(operands[2]) < 256) + || (INTVAL(operands[2]) > -8 && INTVAL(operands[2]) < 8))" + [(parallel + [(set (match_dup 0) + (plus:SI (match_dup 1) + (match_dup 2))) + 
(clobber (reg:CC CC_REGNUM))])] + "" +) + +(define_insn "*thumb2_addsi_short" + [(set (match_operand:SI 0 "low_register_operand" "=l,l") + (plus:SI (match_operand:SI 1 "low_register_operand" "l,0") + (match_operand:SI 2 "low_reg_or_int_operand" "lPt,Ps"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && reload_completed" + "* + HOST_WIDE_INT val; + + if (GET_CODE (operands[2]) == CONST_INT) + val = INTVAL(operands[2]); + else + val = 0; + + /* We prefer eg. subs rn, rn, #1 over adds rn, rn, #0xffffffff. */ + if (val < 0 && const_ok_for_arm(ARM_SIGN_EXTEND (-val))) + return \"sub%!\\t%0, %1, #%n2\"; + else + return \"add%!\\t%0, %1, %2\"; + " + [(set_attr "predicable" "yes") + (set_attr "length" "2")] +) + +(define_insn "divsi3" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (div:SI (match_operand:SI 1 "s_register_operand" "r") + (match_operand:SI 2 "s_register_operand" "r")))] + "TARGET_THUMB2 && arm_arch_hwdiv" + "sdiv%?\t%0, %1, %2" + [(set_attr "predicable" "yes") + (set_attr "insn" "sdiv")] +) + +(define_insn "udivsi3" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (udiv:SI (match_operand:SI 1 "s_register_operand" "r") + (match_operand:SI 2 "s_register_operand" "r")))] + "TARGET_THUMB2 && arm_arch_hwdiv" + "udiv%?\t%0, %1, %2" + [(set_attr "predicable" "yes") + (set_attr "insn" "udiv")] +) + +(define_insn "*thumb2_subsi_short" + [(set (match_operand:SI 0 "low_register_operand" "=l") + (minus:SI (match_operand:SI 1 "low_register_operand" "l") + (match_operand:SI 2 "low_register_operand" "l"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && reload_completed" + "sub%!\\t%0, %1, %2" + [(set_attr "predicable" "yes") + (set_attr "length" "2")] +) + +(define_peephole2 + [(set (match_operand:CC 0 "cc_register" "") + (compare:CC (match_operand:SI 1 "low_register_operand" "") + (match_operand:SI 2 "const_int_operand" "")))] + "TARGET_THUMB2 + && peep2_reg_dead_p (1, operands[1]) + && satisfies_constraint_Pw (operands[2])" + [(parallel + [(set (match_dup 0) (compare:CC (match_dup 1) (match_dup 2))) + (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 3)))])] + "operands[3] = GEN_INT (- INTVAL (operands[2]));" +) + +(define_peephole2 + [(match_scratch:SI 3 "l") + (set (match_operand:CC 0 "cc_register" "") + (compare:CC (match_operand:SI 1 "low_register_operand" "") + (match_operand:SI 2 "const_int_operand" "")))] + "TARGET_THUMB2 + && satisfies_constraint_Px (operands[2])" + [(parallel + [(set (match_dup 0) (compare:CC (match_dup 1) (match_dup 2))) + (set (match_dup 3) (plus:SI (match_dup 1) (match_dup 4)))])] + "operands[4] = GEN_INT (- INTVAL (operands[2]));" +) + +(define_insn "*thumb2_addsi3_compare0" + [(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV + (plus:SI (match_operand:SI 1 "s_register_operand" "l, 0, r") + (match_operand:SI 2 "arm_add_operand" "lPt,Ps,rIL")) + (const_int 0))) + (set (match_operand:SI 0 "s_register_operand" "=l,l,r") + (plus:SI (match_dup 1) (match_dup 2)))] + "TARGET_THUMB2" + "* + HOST_WIDE_INT val; + + if (GET_CODE (operands[2]) == CONST_INT) + val = INTVAL (operands[2]); + else + val = 0; + + if (val < 0 && const_ok_for_arm (ARM_SIGN_EXTEND (-val))) + return \"subs\\t%0, %1, #%n2\"; + else + return \"adds\\t%0, %1, %2\"; + " + [(set_attr "conds" "set") + (set_attr "length" "2,2,4")] +) + +(define_insn "*thumb2_addsi3_compare0_scratch" + [(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV + (plus:SI (match_operand:SI 0 "s_register_operand" "l, r") + (match_operand:SI 1 "arm_add_operand" "lPv,rIL")) + (const_int 0)))] + "TARGET_THUMB2" + 
"* + HOST_WIDE_INT val; + + if (GET_CODE (operands[1]) == CONST_INT) + val = INTVAL (operands[1]); + else + val = 0; + + if (val < 0 && const_ok_for_arm (ARM_SIGN_EXTEND (-val))) + return \"cmp\\t%0, #%n1\"; + else + return \"cmn\\t%0, %1\"; + " + [(set_attr "conds" "set") + (set_attr "length" "2,4")] +) + +;; 16-bit encodings of "muls" and "mul". We only use these when +;; optimizing for size since "muls" is slow on all known +;; implementations and since "mul" will be generated by +;; "*arm_mulsi3_v6" anyhow. The assembler will use a 16-bit encoding +;; for "mul" whenever possible anyhow. +(define_peephole2 + [(set (match_operand:SI 0 "low_register_operand" "") + (mult:SI (match_operand:SI 1 "low_register_operand" "") + (match_dup 0)))] + "TARGET_THUMB2 && optimize_size && peep2_regno_dead_p (0, CC_REGNUM)" + [(parallel + [(set (match_dup 0) + (mult:SI (match_dup 0) (match_dup 1))) + (clobber (reg:CC CC_REGNUM))])] + "" +) + +(define_peephole2 + [(set (match_operand:SI 0 "low_register_operand" "") + (mult:SI (match_dup 0) + (match_operand:SI 1 "low_register_operand" "")))] + "TARGET_THUMB2 && optimize_size && peep2_regno_dead_p (0, CC_REGNUM)" + [(parallel + [(set (match_dup 0) + (mult:SI (match_dup 0) (match_dup 1))) + (clobber (reg:CC CC_REGNUM))])] + "" +) + +(define_insn "*thumb2_mulsi_short" + [(set (match_operand:SI 0 "low_register_operand" "=l") + (mult:SI (match_operand:SI 1 "low_register_operand" "%0") + (match_operand:SI 2 "low_register_operand" "l"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && optimize_size && reload_completed" + "mul%!\\t%0, %2, %0" + [(set_attr "predicable" "yes") + (set_attr "length" "2") + (set_attr "insn" "muls")]) + +(define_insn "*thumb2_mulsi_short_compare0" + [(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV + (mult:SI (match_operand:SI 1 "register_operand" "%0") + (match_operand:SI 2 "register_operand" "l")) + (const_int 0))) + (set (match_operand:SI 0 "register_operand" "=l") + (mult:SI (match_dup 1) (match_dup 2)))] + "TARGET_THUMB2 && optimize_size" + "muls\\t%0, %2, %0" + [(set_attr "length" "2") + (set_attr "insn" "muls")]) + +(define_insn "*thumb2_mulsi_short_compare0_scratch" + [(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV + (mult:SI (match_operand:SI 1 "register_operand" "%0") + (match_operand:SI 2 "register_operand" "l")) + (const_int 0))) + (clobber (match_scratch:SI 0 "=l"))] + "TARGET_THUMB2 && optimize_size" + "muls\\t%0, %2, %0" + [(set_attr "length" "2") + (set_attr "insn" "muls")]) + +(define_insn "*thumb2_cbz" + [(set (pc) (if_then_else + (eq (match_operand:SI 0 "s_register_operand" "l,?r") + (const_int 0)) + (label_ref (match_operand 1 "" "")) + (pc))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "* + if (get_attr_length (insn) == 2) + return \"cbz\\t%0, %l1\"; + else + return \"cmp\\t%0, #0\;beq\\t%l1\"; + " + [(set (attr "length") + (if_then_else + (and (ge (minus (match_dup 1) (pc)) (const_int 2)) + (le (minus (match_dup 1) (pc)) (const_int 128)) + (eq (symbol_ref ("which_alternative")) (const_int 0))) + (const_int 2) + (const_int 8)))] +) + +(define_insn "*thumb2_cbnz" + [(set (pc) (if_then_else + (ne (match_operand:SI 0 "s_register_operand" "l,?r") + (const_int 0)) + (label_ref (match_operand 1 "" "")) + (pc))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2" + "* + if (get_attr_length (insn) == 2) + return \"cbnz\\t%0, %l1\"; + else + return \"cmp\\t%0, #0\;bne\\t%l1\"; + " + [(set (attr "length") + (if_then_else + (and (ge (minus (match_dup 1) (pc)) (const_int 2)) + (le (minus (match_dup 1) (pc)) (const_int 
128)) + (eq (symbol_ref ("which_alternative")) (const_int 0))) + (const_int 2) + (const_int 8)))] +) + +;; 16-bit complement +(define_peephole2 + [(set (match_operand:SI 0 "low_register_operand" "") + (not:SI (match_operand:SI 1 "low_register_operand" "")))] + "TARGET_THUMB2 + && peep2_regno_dead_p(0, CC_REGNUM)" + [(parallel + [(set (match_dup 0) + (not:SI (match_dup 1))) + (clobber (reg:CC CC_REGNUM))])] + "" +) + +(define_insn "*thumb2_one_cmplsi2_short" + [(set (match_operand:SI 0 "low_register_operand" "=l") + (not:SI (match_operand:SI 1 "low_register_operand" "l"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && reload_completed" + "mvn%!\t%0, %1" + [(set_attr "predicable" "yes") + (set_attr "length" "2")] +) + +;; 16-bit negate +(define_peephole2 + [(set (match_operand:SI 0 "low_register_operand" "") + (neg:SI (match_operand:SI 1 "low_register_operand" "")))] + "TARGET_THUMB2 + && peep2_regno_dead_p(0, CC_REGNUM)" + [(parallel + [(set (match_dup 0) + (neg:SI (match_dup 1))) + (clobber (reg:CC CC_REGNUM))])] + "" +) + +(define_insn "*thumb2_negsi2_short" + [(set (match_operand:SI 0 "low_register_operand" "=l") + (neg:SI (match_operand:SI 1 "low_register_operand" "l"))) + (clobber (reg:CC CC_REGNUM))] + "TARGET_THUMB2 && reload_completed" + "neg%!\t%0, %1" + [(set_attr "predicable" "yes") + (set_attr "length" "2")] +) + +(define_insn "*orsi_notsi_si" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (ior:SI (not:SI (match_operand:SI 2 "s_register_operand" "r")) + (match_operand:SI 1 "s_register_operand" "r")))] + "TARGET_THUMB2" + "orn%?\\t%0, %1, %2" + [(set_attr "predicable" "yes")] +) + +(define_insn "*orsi_not_shiftsi_si" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (ior:SI (not:SI (match_operator:SI 4 "shift_operator" + [(match_operand:SI 2 "s_register_operand" "r") + (match_operand:SI 3 "const_int_operand" "M")])) + (match_operand:SI 1 "s_register_operand" "r")))] + "TARGET_THUMB2" + "orn%?\\t%0, %1, %2%S4" + [(set_attr "predicable" "yes") + (set_attr "shift" "2") + (set_attr "type" "alu_shift")] +) + +(define_peephole2 + [(set (match_operand:CC_NOOV 0 "cc_register" "") + (compare:CC_NOOV (zero_extract:SI + (match_operand:SI 1 "low_register_operand" "") + (const_int 1) + (match_operand:SI 2 "const_int_operand" "")) + (const_int 0))) + (match_scratch:SI 3 "l") + (set (pc) + (if_then_else (match_operator:CC_NOOV 4 "equality_operator" + [(match_dup 0) (const_int 0)]) + (match_operand 5 "" "") + (match_operand 6 "" "")))] + "TARGET_THUMB2 + && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32)" + [(parallel [(set (match_dup 0) + (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2)) + (const_int 0))) + (clobber (match_dup 3))]) + (set (pc) + (if_then_else (match_op_dup 4 [(match_dup 0) (const_int 0)]) + (match_dup 5) (match_dup 6)))] + " + operands[2] = GEN_INT (31 - INTVAL (operands[2])); + operands[4] = gen_rtx_fmt_ee (GET_CODE (operands[4]) == NE ? 
LT : GE, + VOIDmode, operands[0], const0_rtx); + ") + +(define_peephole2 + [(set (match_operand:CC_NOOV 0 "cc_register" "") + (compare:CC_NOOV (zero_extract:SI + (match_operand:SI 1 "low_register_operand" "") + (match_operand:SI 2 "const_int_operand" "") + (const_int 0)) + (const_int 0))) + (match_scratch:SI 3 "l") + (set (pc) + (if_then_else (match_operator:CC_NOOV 4 "equality_operator" + [(match_dup 0) (const_int 0)]) + (match_operand 5 "" "") + (match_operand 6 "" "")))] + "TARGET_THUMB2 + && (INTVAL (operands[2]) > 0 && INTVAL (operands[2]) < 32)" + [(parallel [(set (match_dup 0) + (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2)) + (const_int 0))) + (clobber (match_dup 3))]) + (set (pc) + (if_then_else (match_op_dup 4 [(match_dup 0) (const_int 0)]) + (match_dup 5) (match_dup 6)))] + " + operands[2] = GEN_INT (32 - INTVAL (operands[2])); + ") -- cgit v1.2.3