100-PR-rtl-optimization-83496.patch (5.3 KB)
  1. From: ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>
  2. Date: Mon, 26 Feb 2018 16:29:30 +0000
  3. Subject: [PATCH] PR rtl-optimization/83496 * reorg.c
  4. (steal_delay_list_from_target): Change REDUNDANT array from booleans to
  5. RTXes. Call fix_reg_dead_note on every non-null element.
  6. (steal_delay_list_from_fallthrough): Call fix_reg_dead_note on a
  7. redundant insn, if any. (relax_delay_slots): Likewise.
  8. (update_reg_unused_notes): Rename REDUNDANT_INSN to OTHER_INSN.
  9. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@257996 138bc75d-0d04-0410-961f-82ee72b054a4
  10. ---
  11. create mode 120000 gcc/testsuite/gcc.c-torture/execute/20180226-1.c
  12. --- a/gcc/reorg.c
  13. +++ b/gcc/reorg.c
  14. @@ -1035,7 +1035,8 @@ check_annul_list_true_false (int annul_t
  15. static void
  16. steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
  17. - vec<rtx_insn *> *delay_list, resources *sets,
  18. + vec<rtx_insn *> *delay_list,
  19. + struct resources *sets,
  20. struct resources *needed,
  21. struct resources *other_needed,
  22. int slots_to_fill, int *pslots_filled,
  23. @@ -1048,7 +1049,7 @@ steal_delay_list_from_target (rtx_insn *
  24. int used_annul = 0;
  25. int i;
  26. struct resources cc_set;
  27. - bool *redundant;
  28. + rtx_insn **redundant;
  29. /* We can't do anything if there are more delay slots in SEQ than we
  30. can handle, or if we don't know that it will be a taken branch.
  31. @@ -1087,7 +1088,7 @@ steal_delay_list_from_target (rtx_insn *
  32. if (! targetm.can_follow_jump (insn, seq->insn (0)))
  33. return;
  34. - redundant = XALLOCAVEC (bool, XVECLEN (seq, 0));
  35. + redundant = XALLOCAVEC (rtx_insn *, XVECLEN (seq, 0));
  36. for (i = 1; i < seq->len (); i++)
  37. {
  38. rtx_insn *trial = seq->insn (i);
  39. @@ -1151,7 +1152,10 @@ steal_delay_list_from_target (rtx_insn *
  40. we therefore decided not to copy. */
  41. for (i = 1; i < seq->len (); i++)
  42. if (redundant[i])
  43. - update_block (seq->insn (i), insn);
  44. + {
  45. + fix_reg_dead_note (redundant[i], insn);
  46. + update_block (seq->insn (i), insn);
  47. + }
  48. /* Show the place to which we will be branching. */
  49. *pnew_thread = first_active_target_insn (JUMP_LABEL (seq->insn (0)));
  50. @@ -1198,6 +1202,7 @@ steal_delay_list_from_fallthrough (rtx_i
  51. for (i = 1; i < seq->len (); i++)
  52. {
  53. rtx_insn *trial = seq->insn (i);
  54. + rtx_insn *prior_insn;
  55. /* If TRIAL sets CC0, stealing it will move it too far from the use
  56. of CC0. */
  57. @@ -1209,8 +1214,9 @@ steal_delay_list_from_fallthrough (rtx_i
  58. break;
  59. /* If this insn was already done, we don't need it. */
  60. - if (redundant_insn (trial, insn, *delay_list))
  61. + if ((prior_insn = redundant_insn (trial, insn, *delay_list)))
  62. {
  63. + fix_reg_dead_note (prior_insn, insn);
  64. update_block (trial, insn);
  65. delete_from_delay_slot (trial);
  66. continue;
  67. @@ -1790,15 +1796,14 @@ fix_reg_dead_note (rtx_insn *start_insn,
  68. }
  69. }
  70. -/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
  71. +/* Delete any REG_UNUSED notes that exist on INSN but not on OTHER_INSN.
  72. This handles the case of udivmodXi4 instructions which optimize their
  73. - output depending on whether any REG_UNUSED notes are present.
  74. - we must make sure that INSN calculates as many results as REDUNDANT_INSN
  75. - does. */
  76. + output depending on whether any REG_UNUSED notes are present. We must
  77. + make sure that INSN calculates as many results as OTHER_INSN does. */
  78. static void
  79. -update_reg_unused_notes (rtx_insn *insn, rtx redundant_insn)
  80. +update_reg_unused_notes (rtx_insn *insn, rtx other_insn)
  81. {
  82. rtx link, next;
  83. @@ -1810,8 +1815,7 @@ update_reg_unused_notes (rtx_insn *insn,
  84. || !REG_P (XEXP (link, 0)))
  85. continue;
  86. - if (! find_regno_note (redundant_insn, REG_UNUSED,
  87. - REGNO (XEXP (link, 0))))
  88. + if (!find_regno_note (other_insn, REG_UNUSED, REGNO (XEXP (link, 0))))
  89. remove_note (insn, link);
  90. }
  91. }
  92. @@ -2324,9 +2328,8 @@ follow_jumps (rtx label, rtx_insn *jump,
  93. taken and THREAD_IF_TRUE is set. This is used for the branch at the
  94. end of a loop back up to the top.
  95. - OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
  96. - thread. I.e., it is the fallthrough code of our jump or the target of the
  97. - jump when we are the only jump going there.
  98. + OWN_THREAD is true if we are the only user of the thread, i.e. it is
  99. + the target of the jump when we are the only jump going there.
  100. If OWN_THREAD is false, it must be the "true" thread of a jump. In that
  101. case, we can only take insns from the head of the thread for our delay
  102. @@ -3117,7 +3120,7 @@ relax_delay_slots (rtx_insn *first)
  103. /* Look at every JUMP_INSN and see if we can improve it. */
  104. for (insn = first; insn; insn = next)
  105. {
  106. - rtx_insn *other;
  107. + rtx_insn *other, *prior_insn;
  108. bool crossing;
  109. next = next_active_insn (insn);
  110. @@ -3223,8 +3226,9 @@ relax_delay_slots (rtx_insn *first)
  111. /* See if the first insn in the delay slot is redundant with some
  112. previous insn. Remove it from the delay slot if so; then set up
  113. to reprocess this insn. */
  114. - if (redundant_insn (pat->insn (1), delay_insn, vNULL))
  115. + if ((prior_insn = redundant_insn (pat->insn (1), delay_insn, vNULL)))
  116. {
  117. + fix_reg_dead_note (prior_insn, insn);
  118. update_block (pat->insn (1), insn);
  119. delete_from_delay_slot (pat->insn (1));
  120. next = prev_active_insn (next);