source: trunk/src/gcc/gcc/jump.c@ 1392

Last change on this file since 1392 was 1392, checked in by bird, 21 years ago

This commit was generated by cvs2svn to compensate for changes in r1391,
which included commits to RCS files with non-trunk default branches.

  • Property cvs2svn:cvs-rev set to 1.1.1.2
  • Property svn:eol-style set to native
  • Property svn:executable set to *
File size: 62.5 KB
Line 
1/* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22/* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
24 operate with jumps.
25
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
33
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
36
37#include "config.h"
38#include "system.h"
39#include "rtl.h"
40#include "tm_p.h"
41#include "flags.h"
42#include "hard-reg-set.h"
43#include "regs.h"
44#include "insn-config.h"
45#include "insn-attr.h"
46#include "recog.h"
47#include "function.h"
48#include "expr.h"
49#include "real.h"
50#include "except.h"
51#include "toplev.h"
52#include "reload.h"
53#include "predict.h"
54
55/* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57/* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
61
/* Forward declarations for the static helpers defined later in this
   file.  PARAMS is the traditional-C prototype compatibility macro
   used throughout GCC.  */
static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
static int init_label_info PARAMS ((rtx));
static void mark_all_labels PARAMS ((rtx));
static int duplicate_loop_exit_test PARAMS ((rtx));
static void delete_computation PARAMS ((rtx));
static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
static int redirect_exp PARAMS ((rtx, rtx, rtx));
static void invert_exp_1 PARAMS ((rtx));
static int invert_exp PARAMS ((rtx));
static int returnjump_p_1 PARAMS ((rtx *, void *));
static void delete_prior_computation PARAMS ((rtx, rtx));
73
74
75/* Alternate entry into the jump optimizer. This entry point only rebuilds
76 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
77 instructions. */
78void
79rebuild_jump_labels (f)
80 rtx f;
81{
82 rtx insn;
83 int max_uid = 0;
84
85 max_uid = init_label_info (f) + 1;
86
87 mark_all_labels (f);
88
89 /* Keep track of labels used from static data; we don't track them
90 closely enough to delete them here, so make sure their reference
91 count doesn't drop to zero. */
92
93 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
94 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
95 LABEL_NUSES (XEXP (insn, 0))++;
96}
97
98
99/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
100 non-fallthru insn. This is not generally true, as multiple barriers
101 may have crept in, or the BARRIER may be separated from the last
102 real insn by one or more NOTEs.
103
104 This simple pass moves barriers and removes duplicates so that the
105 old code is happy.
106 */
107void
108cleanup_barriers ()
109{
110 rtx insn, next, prev;
111 for (insn = get_insns (); insn; insn = next)
112 {
113 next = NEXT_INSN (insn);
114 if (GET_CODE (insn) == BARRIER)
115 {
116 prev = prev_nonnote_insn (insn);
117 if (GET_CODE (prev) == BARRIER)
118 delete_barrier (insn);
119 else if (prev != PREV_INSN (insn))
120 reorder_insns (insn, insn, prev);
121 }
122 }
123}
124
125
126/* Return the next insn after INSN that is not a NOTE and is in the loop,
127 i.e. when there is no such INSN before NOTE_INSN_LOOP_END return NULL_RTX.
128 This routine does not look inside SEQUENCEs. */
129
130static rtx
131next_nonnote_insn_in_loop (insn)
132 rtx insn;
133{
134 while (insn)
135 {
136 insn = NEXT_INSN (insn);
137 if (insn == 0 || GET_CODE (insn) != NOTE)
138 break;
139 if (GET_CODE (insn) == NOTE
140 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
141 return NULL_RTX;
142 }
143
144 return insn;
145}
146
/* Scan the insn chain F for loops that begin with an unconditional jump
   to their exit test, and duplicate that test before the loop via
   duplicate_loop_exit_test, producing a test-at-the-bottom loop.  */
void
copy_loop_headers (f)
     rtx f;
{
  rtx insn, next;
  /* Single pass over the chain; NEXT is recomputed after a successful
     duplication so the newly emitted insns are rescanned.  */
  for (insn = f; insn; insn = next)
    {
      rtx temp, temp1;

      next = NEXT_INSN (insn);

      /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	 jump.  Try to optimize by duplicating the loop exit test if so.
	 This is only safe immediately after regscan, because it uses
	 the values of regno_first_uid and regno_last_uid.  */
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	  && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
	  && any_uncondjump_p (temp1) && onlyjump_p (temp1))
	{
	  temp = PREV_INSN (insn);
	  if (duplicate_loop_exit_test (insn))
	    {
	      /* The copies were emitted just before INSN; resume the
		 scan at the first newly emitted insn.  */
	      next = NEXT_INSN (temp);
	    }
	}
    }
}
176
/* Remove line number notes in the chain starting at F that merely
   repeat the previous note's file and line.  */
void
purge_line_number_notes (f)
     rtx f;
{
  rtx last_note = 0;
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	  /* Any previous line note was for the prologue; gdb wants a new
	     note after the prologue even if it is for the same line.  */
	  last_note = NULL_RTX;
	else if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    /* Delete this note if it is identical to previous note.  */
	    if (last_note
		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
	      {
		delete_related_insns (insn);
		continue;
	      }

	    last_note = insn;
	  }
      }
}
210
211
212/* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
213 notes whose labels don't occur in the insn any more. Returns the
214 largest INSN_UID found. */
215static int
216init_label_info (f)
217 rtx f;
218{
219 int largest_uid = 0;
220 rtx insn;
221
222 for (insn = f; insn; insn = NEXT_INSN (insn))
223 {
224 if (GET_CODE (insn) == CODE_LABEL)
225 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
226 else if (GET_CODE (insn) == JUMP_INSN)
227 JUMP_LABEL (insn) = 0;
228 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
229 {
230 rtx note, next;
231
232 for (note = REG_NOTES (insn); note; note = next)
233 {
234 next = XEXP (note, 1);
235 if (REG_NOTE_KIND (note) == REG_LABEL
236 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
237 remove_note (insn, note);
238 }
239 }
240 if (INSN_UID (insn) > largest_uid)
241 largest_uid = INSN_UID (insn);
242 }
243
244 return largest_uid;
245}
246
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.

   Recurses into the alternate insn sequences hung off CALL_PLACEHOLDER
   insns so labels referenced there are counted too.  */

static void
mark_all_labels (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	if (GET_CODE (insn) == CALL_INSN
	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	  {
	    /* Process each of the placeholder's three alternate insn
	       sequences.  */
	    mark_all_labels (XEXP (PATTERN (insn), 0));
	    mark_all_labels (XEXP (PATTERN (insn), 1));
	    mark_all_labels (XEXP (PATTERN (insn), 2));

	    /* Canonicalize the tail recursion label attached to the
	       CALL_PLACEHOLDER insn.  */
	    if (XEXP (PATTERN (insn), 3))
	      {
		rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
						   XEXP (PATTERN (insn), 3));
		mark_jump_label (label_ref, insn, 0);
		XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
	      }

	    continue;
	  }

	mark_jump_label (PATTERN (insn), insn, 0);
	if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, 0);
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	  }
      }
}
303
/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before INSN,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after INSN.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0, first_copy = 0;
  int num_insns = 0;
  /* First insn of the exit test: the insn after the label the loop's
     entry jump targets.  */
  rtx exitcode
    = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;
  rtx loop_pre_header_label;

  /* Scan the exit code.  We do not perform this optimization if any insn:

         is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop

     We also do not do this if we find an insn with ASM_OPERANDS.  While
     this restriction should not be necessary, copying an insn with
     ASM_OPERANDS can confuse asm_noperands in some cases.

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:

	  if (optimize < 2
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    /* If we were to duplicate this code, we would not move
	       the BLOCK notes, and so debugging the moved code would
	       be difficult.  Thus, we only move the code with -O2 or
	       higher.  */
	    return 0;

	  break;
	case JUMP_INSN:
	case INSN:
	  /* The code below would grossly mishandle REG_WAS_0 notes,
	     so get rid of them here.  */
	  while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
	    remove_note (insn, p);
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	default:
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
	/* Check that the register dies within the exit code; if its last
	   use is outside we must keep the original pseudo.  */
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));

	    REG_LOOP_TEST_P (reg) = 1;

	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
	  }
      }
  loop_pre_header_label = gen_label_rtx ();

  /* Now copy each insn.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	  copy = emit_barrier_before (loop_start);
	  break;
	case NOTE:
	  /* Only copy line-number notes.  */
	  if (NOTE_LINE_NUMBER (insn) >= 0)
	    {
	      copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	      NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	    }
	  break;

	case INSN:
	  copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	  mark_jump_label (PATTERN (copy), copy, 0);
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);

	  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	     make them.  */
	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) != REG_LABEL)
	      {
		if (GET_CODE (link) == EXPR_LIST)
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
		else
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
	      }

	  if (reg_map && REG_NOTES (copy))
	    replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  break;

	case JUMP_INSN:
	  copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
					loop_start);
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	  mark_jump_label (PATTERN (copy), copy, 0);
	  if (REG_NOTES (insn))
	    {
	      REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
	      if (reg_map)
		replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	    }

	  /* Predict conditional jumps that do make the loop loop as taken.
	     Other jumps are probably exit conditions, so predict
	     them as untaken.  */
	  if (any_condjump_p (copy))
	    {
	      rtx label = JUMP_LABEL (copy);
	      if (label)
		{
		  /* The jump_insn after loop_start should be followed
		     by barrier and loopback label.  */
		  if (prev_nonnote_insn (label)
		      && (prev_nonnote_insn (prev_nonnote_insn (label))
			  == next_nonnote_insn (loop_start)))
		    {
		      predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
		      /* To keep pre-header, we need to redirect all loop
			 entrances before the LOOP_BEG note.  */
		      redirect_jump (copy, loop_pre_header_label, 0);
		    }
		  else
		    predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
		}
	    }
	  break;

	default:
	  abort ();
	}

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  */
      if (! first_copy)
	first_copy = copy;
    }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  This may not
	 be strictly necessary since we should have copied at least one
	 insn above.  But I am going to be safe.  */
      if (! first_copy)
	first_copy = copy;

      mark_jump_label (PATTERN (copy), copy, 0);
      emit_barrier_before (loop_start);
    }

  emit_label_before (loop_pre_header_label, loop_start);

  /* Now scan from the first insn we copied to the last insn we copied
     (copy) for new pseudo registers.  Do this after the code to jump to
     the end label since that might create a new pseudo too.  */
  reg_scan_update (first_copy, copy, max_reg);

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  /* Delete the original unconditional jump into the exit test.  */
  delete_related_insns (next_nonnote_insn (loop_start));

  /* Clean up.  */
  if (reg_map)
    free (reg_map);

  return 1;
}
547
548
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
   notes between START and END out before START.  START and END may be such
   notes.  Returns the values of the new starting and ending insns, which
   may be different if the original ones were such notes.
   Return true if there were only such notes and no real instructions.  */

bool
squeeze_notes (startp, endp)
     rtx* startp;
     rtx* endp;
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;		/* Last non-note insn seen so far.  */
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
	{
	  if (insn == start)
	    /* A note at the head of the range just shrinks the range.  */
	    start = next;
	  else
	    {
	      /* Splice INSN out of its position and relink it
		 immediately before START, updating all four neighbour
		 pointers by hand.  */
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
      else
	last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
606
607
608/* Return the label before INSN, or put a new label there. */
609
610rtx
611get_label_before (insn)
612 rtx insn;
613{
614 rtx label;
615
616 /* Find an existing label at this point
617 or make a new one if there is none. */
618 label = prev_nonnote_insn (insn);
619
620 if (label == 0 || GET_CODE (label) != CODE_LABEL)
621 {
622 rtx prev = PREV_INSN (insn);
623
624 label = gen_label_rtx ();
625 emit_label_after (label, prev);
626 LABEL_NUSES (label) = 0;
627 }
628 return label;
629}
630
631/* Return the label after INSN, or put a new label there. */
632
633rtx
634get_label_after (insn)
635 rtx insn;
636{
637 rtx label;
638
639 /* Find an existing label at this point
640 or make a new one if there is none. */
641 label = next_nonnote_insn (insn);
642
643 if (label == 0 || GET_CODE (label) != CODE_LABEL)
644 {
645 label = gen_label_rtx ();
646 emit_label_after (label, insn);
647 LABEL_NUSES (label) = 0;
648 }
649 return label;
650}
651
652
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (code, arg0, arg1, insn)
     rtx insn, arg0, arg1;
     enum rtx_code code;
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != '<')
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
#ifdef REVERSIBLE_CC_MODE
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }
#endif

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
	 This code is expensive, but with sane machine description it
	 will be never used, since REVERSIBLE_CC_MODE will return true
	 in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* Walk backwards to the insn that set ARG0, stopping at a label.  */
      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && GET_CODE (prev) != CODE_LABEL;
	   prev = prev_nonnote_insn (prev))
	{
	  rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
		 of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we don't
	     understand, give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
776
777/* An wrapper around the previous function to take COMPARISON as rtx
778 expression. This simplifies many callers. */
779enum rtx_code
780reversed_comparison_code (comparison, insn)
781 rtx comparison, insn;
782{
783 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
784 return UNKNOWN;
785 return reversed_comparison_code_parts (GET_CODE (comparison),
786 XEXP (comparison, 0),
787 XEXP (comparison, 1), insn);
788}
789
790
791/* Given an rtx-code for a comparison, return the code for the negated
792 comparison. If no such code exists, return UNKNOWN.
793
794 WATCH OUT! reverse_condition is not safe to use on a jump that might
795 be acting on the results of an IEEE floating point comparison, because
796 of the special treatment of non-signaling nans in comparisons.
797 Use reversed_comparison_code instead. */
798
799enum rtx_code
800reverse_condition (code)
801 enum rtx_code code;
802{
803 switch (code)
804 {
805 case EQ:
806 return NE;
807 case NE:
808 return EQ;
809 case GT:
810 return LE;
811 case GE:
812 return LT;
813 case LT:
814 return GE;
815 case LE:
816 return GT;
817 case GTU:
818 return LEU;
819 case GEU:
820 return LTU;
821 case LTU:
822 return GEU;
823 case LEU:
824 return GTU;
825 case UNORDERED:
826 return ORDERED;
827 case ORDERED:
828 return UNORDERED;
829
830 case UNLT:
831 case UNLE:
832 case UNGT:
833 case UNGE:
834 case UNEQ:
835 case LTGT:
836 return UNKNOWN;
837
838 default:
839 abort ();
840 }
841}
842
843/* Similar, but we're allowed to generate unordered comparisons, which
844 makes it safe for IEEE floating-point. Of course, we have to recognize
845 that the target will support them too... */
846
847enum rtx_code
848reverse_condition_maybe_unordered (code)
849 enum rtx_code code;
850{
851 switch (code)
852 {
853 case EQ:
854 return NE;
855 case NE:
856 return EQ;
857 case GT:
858 return UNLE;
859 case GE:
860 return UNLT;
861 case LT:
862 return UNGE;
863 case LE:
864 return UNGT;
865 case LTGT:
866 return UNEQ;
867 case UNORDERED:
868 return ORDERED;
869 case ORDERED:
870 return UNORDERED;
871 case UNLT:
872 return GE;
873 case UNLE:
874 return GT;
875 case UNGT:
876 return LE;
877 case UNGE:
878 return LT;
879 case UNEQ:
880 return LTGT;
881
882 default:
883 abort ();
884 }
885}
886
887/* Similar, but return the code when two operands of a comparison are swapped.
888 This IS safe for IEEE floating-point. */
889
890enum rtx_code
891swap_condition (code)
892 enum rtx_code code;
893{
894 switch (code)
895 {
896 case EQ:
897 case NE:
898 case UNORDERED:
899 case ORDERED:
900 case UNEQ:
901 case LTGT:
902 return code;
903
904 case GT:
905 return LT;
906 case GE:
907 return LE;
908 case LT:
909 return GT;
910 case LE:
911 return GE;
912 case GTU:
913 return LTU;
914 case GEU:
915 return LEU;
916 case LTU:
917 return GTU;
918 case LEU:
919 return GEU;
920 case UNLT:
921 return UNGT;
922 case UNLE:
923 return UNGE;
924 case UNGT:
925 return UNLT;
926 case UNGE:
927 return UNLE;
928
929 default:
930 abort ();
931 }
932}
933
934/* Given a comparison CODE, return the corresponding unsigned comparison.
935 If CODE is an equality comparison or already an unsigned comparison,
936 CODE is returned. */
937
938enum rtx_code
939unsigned_condition (code)
940 enum rtx_code code;
941{
942 switch (code)
943 {
944 case EQ:
945 case NE:
946 case GTU:
947 case GEU:
948 case LTU:
949 case LEU:
950 return code;
951
952 case GT:
953 return GTU;
954 case GE:
955 return GEU;
956 case LT:
957 return LTU;
958 case LE:
959 return LEU;
960
961 default:
962 abort ();
963 }
964}
965
966/* Similarly, return the signed version of a comparison. */
967
968enum rtx_code
969signed_condition (code)
970 enum rtx_code code;
971{
972 switch (code)
973 {
974 case EQ:
975 case NE:
976 case GT:
977 case GE:
978 case LT:
979 case LE:
980 return code;
981
982 case GTU:
983 return GT;
984 case GEU:
985 return GE;
986 case LTU:
987 return LT;
988 case LEU:
989 return LE;
990
991 default:
992 abort ();
993 }
994}
995
996
997/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
998 truth of CODE1 implies the truth of CODE2. */
999
1000int
1001comparison_dominates_p (code1, code2)
1002 enum rtx_code code1, code2;
1003{
1004 /* UNKNOWN comparison codes can happen as a result of trying to revert
1005 comparison codes.
1006 They can't match anything, so we have to reject them here. */
1007 if (code1 == UNKNOWN || code2 == UNKNOWN)
1008 return 0;
1009
1010 if (code1 == code2)
1011 return 1;
1012
1013 switch (code1)
1014 {
1015 case UNEQ:
1016 if (code2 == UNLE || code2 == UNGE)
1017 return 1;
1018 break;
1019
1020 case EQ:
1021 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1022 || code2 == ORDERED)
1023 return 1;
1024 break;
1025
1026 case UNLT:
1027 if (code2 == UNLE || code2 == NE)
1028 return 1;
1029 break;
1030
1031 case LT:
1032 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1033 return 1;
1034 break;
1035
1036 case UNGT:
1037 if (code2 == UNGE || code2 == NE)
1038 return 1;
1039 break;
1040
1041 case GT:
1042 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1043 return 1;
1044 break;
1045
1046 case GE:
1047 case LE:
1048 if (code2 == ORDERED)
1049 return 1;
1050 break;
1051
1052 case LTGT:
1053 if (code2 == NE || code2 == ORDERED)
1054 return 1;
1055 break;
1056
1057 case LTU:
1058 if (code2 == LEU || code2 == NE)
1059 return 1;
1060 break;
1061
1062 case GTU:
1063 if (code2 == GEU || code2 == NE)
1064 return 1;
1065 break;
1066
1067 case UNORDERED:
1068 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1069 || code2 == UNGE || code2 == UNGT)
1070 return 1;
1071 break;
1072
1073 default:
1074 break;
1075 }
1076
1077 return 0;
1078}
1079
1080
1081/* Return 1 if INSN is an unconditional jump and nothing else. */
1082
1083int
1084simplejump_p (insn)
1085 rtx insn;
1086{
1087 return (GET_CODE (insn) == JUMP_INSN
1088 && GET_CODE (PATTERN (insn)) == SET
1089 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1090 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1091}
1092
1093/* Return 1 if INSN is an tablejump. */
1094
1095int
1096tablejump_p (insn)
1097 rtx insn;
1098{
1099 rtx table;
1100 return (GET_CODE (insn) == JUMP_INSN
1101 && JUMP_LABEL (insn)
1102 && NEXT_INSN (JUMP_LABEL (insn))
1103 && (table = next_active_insn (JUMP_LABEL (insn)))
1104 && GET_CODE (table) == JUMP_INSN
1105 && (GET_CODE (PATTERN (table)) == ADDR_VEC
1106 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC));
1107}
1108
1109/* Return nonzero if INSN is a (possibly) conditional jump
1110 and nothing more.
1111
1112 Use this function is deprecated, since we need to support combined
1113 branch and compare insns. Use any_condjump_p instead whenever possible. */
1114
1115int
1116condjump_p (insn)
1117 rtx insn;
1118{
1119 rtx x = PATTERN (insn);
1120
1121 if (GET_CODE (x) != SET
1122 || GET_CODE (SET_DEST (x)) != PC)
1123 return 0;
1124
1125 x = SET_SRC (x);
1126 if (GET_CODE (x) == LABEL_REF)
1127 return 1;
1128 else
1129 return (GET_CODE (x) == IF_THEN_ELSE
1130 && ((GET_CODE (XEXP (x, 2)) == PC
1131 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1132 || GET_CODE (XEXP (x, 1)) == RETURN))
1133 || (GET_CODE (XEXP (x, 1)) == PC
1134 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1135 || GET_CODE (XEXP (x, 2)) == RETURN))));
1136
1137 return 0;
1138}
1139
1140/* Return nonzero if INSN is a (possibly) conditional jump inside a
1141 PARALLEL.
1142
1143 Use this function is deprecated, since we need to support combined
1144 branch and compare insns. Use any_condjump_p instead whenever possible. */
1145
1146int
1147condjump_in_parallel_p (insn)
1148 rtx insn;
1149{
1150 rtx x = PATTERN (insn);
1151
1152 if (GET_CODE (x) != PARALLEL)
1153 return 0;
1154 else
1155 x = XVECEXP (x, 0, 0);
1156
1157 if (GET_CODE (x) != SET)
1158 return 0;
1159 if (GET_CODE (SET_DEST (x)) != PC)
1160 return 0;
1161 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1162 return 1;
1163 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1164 return 0;
1165 if (XEXP (SET_SRC (x), 2) == pc_rtx
1166 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1167 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1168 return 1;
1169 if (XEXP (SET_SRC (x), 1) == pc_rtx
1170 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1171 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1172 return 1;
1173 return 0;
1174}
1175
1176/* Return set of PC, otherwise NULL. */
1177
1178rtx
1179pc_set (insn)
1180 rtx insn;
1181{
1182 rtx pat;
1183 if (GET_CODE (insn) != JUMP_INSN)
1184 return NULL_RTX;
1185 pat = PATTERN (insn);
1186
1187 /* The set is allowed to appear either as the insn pattern or
1188 the first set in a PARALLEL. */
1189 if (GET_CODE (pat) == PARALLEL)
1190 pat = XVECEXP (pat, 0, 0);
1191 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1192 return pat;
1193
1194 return NULL_RTX;
1195}
1196
1197/* Return true when insn is an unconditional direct jump,
1198 possibly bundled inside a PARALLEL. */
1199
1200int
1201any_uncondjump_p (insn)
1202 rtx insn;
1203{
1204 rtx x = pc_set (insn);
1205 if (!x)
1206 return 0;
1207 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1208 return 0;
1209 return 1;
1210}
1211
1212/* Return true when insn is a conditional jump. This function works for
1213 instructions containing PC sets in PARALLELs. The instruction may have
1214 various other effects so before removing the jump you must verify
1215 onlyjump_p.
1216
1217 Note that unlike condjump_p it returns false for unconditional jumps. */
1218
1219int
1220any_condjump_p (insn)
1221 rtx insn;
1222{
1223 rtx x = pc_set (insn);
1224 enum rtx_code a, b;
1225
1226 if (!x)
1227 return 0;
1228 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1229 return 0;
1230
1231 a = GET_CODE (XEXP (SET_SRC (x), 1));
1232 b = GET_CODE (XEXP (SET_SRC (x), 2));
1233
1234 return ((b == PC && (a == LABEL_REF || a == RETURN))
1235 || (a == PC && (b == LABEL_REF || b == RETURN)));
1236}
1237
1238/* Return the label of a conditional jump. */
1239
1240rtx
1241condjump_label (insn)
1242 rtx insn;
1243{
1244 rtx x = pc_set (insn);
1245
1246 if (!x)
1247 return NULL_RTX;
1248 x = SET_SRC (x);
1249 if (GET_CODE (x) == LABEL_REF)
1250 return x;
1251 if (GET_CODE (x) != IF_THEN_ELSE)
1252 return NULL_RTX;
1253 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1254 return XEXP (x, 1);
1255 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1256 return XEXP (x, 2);
1257 return NULL_RTX;
1258}
1259
1260/* Return true if INSN is a (possibly conditional) return insn. */
1261
1262static int
1263returnjump_p_1 (loc, data)
1264 rtx *loc;
1265 void *data ATTRIBUTE_UNUSED;
1266{
1267 rtx x = *loc;
1268
1269 return x && (GET_CODE (x) == RETURN
1270 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1271}
1272
1273int
1274returnjump_p (insn)
1275 rtx insn;
1276{
1277 if (GET_CODE (insn) != JUMP_INSN)
1278 return 0;
1279 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1280}
1281
1282/* Return true if INSN is a jump that only transfers control and
1283 nothing more. */
1284
1285int
1286onlyjump_p (insn)
1287 rtx insn;
1288{
1289 rtx set;
1290
1291 if (GET_CODE (insn) != JUMP_INSN)
1292 return 0;
1293
1294 set = single_set (insn);
1295 if (set == NULL)
1296 return 0;
1297 if (GET_CODE (SET_DEST (set)) != PC)
1298 return 0;
1299 if (side_effects_p (SET_SRC (set)))
1300 return 0;
1301
1302 return 1;
1303}
1304
1305#ifdef HAVE_cc0
1306
1307/* Return nonzero if X is an RTX that only sets the condition codes
1308 and has no side effects. */
1309
1310int
1311only_sets_cc0_p (x)
1312 rtx x;
1313{
1314
1315 if (! x)
1316 return 0;
1317
1318 if (INSN_P (x))
1319 x = PATTERN (x);
1320
1321 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1322}
1323
1324/* Return 1 if X is an RTX that does nothing but set the condition codes
1325 and CLOBBER or USE registers.
1326 Return -1 if X does explicitly set the condition codes,
1327 but also does other things. */
1328
1329int
1330sets_cc0_p (x)
1331 rtx x;
1332{
1333
1334 if (! x)
1335 return 0;
1336
1337 if (INSN_P (x))
1338 x = PATTERN (x);
1339
1340 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1341 return 1;
1342 if (GET_CODE (x) == PARALLEL)
1343 {
1344 int i;
1345 int sets_cc0 = 0;
1346 int other_things = 0;
1347 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1348 {
1349 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1350 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1351 sets_cc0 = 1;
1352 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1353 other_things = 1;
1354 }
1355 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1356 }
1357 return 0;
1358}
1359#endif
1360
1361
1362/* Follow any unconditional jump at LABEL;
1363 return the ultimate label reached by any such chain of jumps.
1364 If LABEL is not followed by a jump, return LABEL.
1365 If the chain loops or we can't find end, return LABEL,
1366 since that tells caller to avoid changing the insn.
1367
1368 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1369 a USE or CLOBBER. */
1370
1371rtx
1372follow_jumps (label)
1373 rtx label;
1374{
1375 rtx insn;
1376 rtx next;
1377 rtx value = label;
1378 int depth;
1379
1380 for (depth = 0;
1381 (depth < 10
1382 && (insn = next_active_insn (value)) != 0
1383 && GET_CODE (insn) == JUMP_INSN
1384 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1385 && onlyjump_p (insn))
1386 || GET_CODE (PATTERN (insn)) == RETURN)
1387 && (next = NEXT_INSN (insn))
1388 && GET_CODE (next) == BARRIER);
1389 depth++)
1390 {
1391 /* Don't chain through the insn that jumps into a loop
1392 from outside the loop,
1393 since that would create multiple loop entry jumps
1394 and prevent loop optimization. */
1395 rtx tem;
1396 if (!reload_completed)
1397 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1398 if (GET_CODE (tem) == NOTE
1399 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1400 /* ??? Optional. Disables some optimizations, but makes
1401 gcov output more accurate with -O. */
1402 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1403 return value;
1404
1405 /* If we have found a cycle, make the insn jump to itself. */
1406 if (JUMP_LABEL (insn) == label)
1407 return label;
1408
1409 tem = next_active_insn (JUMP_LABEL (insn));
1410 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1411 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1412 break;
1413
1414 value = JUMP_LABEL (insn);
1415 }
1416 if (depth == 10)
1417 return label;
1418 return value;
1419}
1420
1421
1422
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.
   (NOTE(review): this function no longer takes a CROSS_JUMP argument;
   the sentence above appears to be stale — confirm against history.)  */

void
mark_jump_label (x, insn, in_mem)
     rtx x;
     rtx insn;
     int in_mem;
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes can never contain a label reference; stop here.  */
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* Remember we are inside a MEM for the SYMBOL_REF case below.  */
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (GET_CODE (label) == NOTE
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	/* Count the use, unless the referencing insn is already dead.  */
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  /* In an ADDR_DIFF_VEC operand 0 is the base label, not part of
	     the table body; the label entries are in operand 1.  */
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  /* Generic recursion over the remaining rtx operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
1539
1540/* If all INSN does is set the pc, delete it,
1541 and delete the insn that set the condition codes for it
1542 if that's what the previous thing was. */
1543
1544void
1545delete_jump (insn)
1546 rtx insn;
1547{
1548 rtx set = single_set (insn);
1549
1550 if (set && GET_CODE (SET_DEST (set)) == PC)
1551 delete_computation (insn);
1552}
1553
1554/* Verify INSN is a BARRIER and delete it. */
1555
1556void
1557delete_barrier (insn)
1558 rtx insn;
1559{
1560 if (GET_CODE (insn) != BARRIER)
1561 abort ();
1562
1563 delete_insn (insn);
1564}
1565
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (note, insn)
     rtx note;
     rtx insn;
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  /* Walk backwards over ordinary insns and calls, stopping at anything
     else (labels, jumps, barriers end the scan implicitly).  */
  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
		    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  We can be run during
	 and after reorg, so SEQUENCE rtl can legitimately show
	 up here.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && GET_CODE (XEXP (pat, 0)) == INSN)
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      /* Found the insn that (last) sets REG.  Decide whether it can
	 be deleted, then stop scanning either way.  */
      if (reg_set_p (reg, pat))
	{
	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      /* Loop ran to completion: every SET targets REG.  */
	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && GET_CODE (SET_DEST (pat)) == REG)
	    {
	      /* Compare the hard-register ranges covered by the SET
		 destination and by REG (pseudos count as one reg).  */
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (dest_regno,
					  GET_MODE (SET_DEST (pat))) : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));

	      /* The SET writes only registers that die here: dead code.  */
	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  /* Every word is now marked unused: the whole SET
		     is dead.  */
		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
1684
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means the insn does nothing but set cc0.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* For each register that dies here, try to delete the insn that
     computed it.  NEXT is saved up front because delete_prior_computation
     may splice NOTE onto another insn's note list.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      delete_prior_computation (note, insn);
    }

  /* Finally delete INSN itself (and anything that becomes unreachable).  */
  delete_related_insns (insn);
}
1742
1743
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (insn)
     rtx insn;
{
  int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  /* Advance NEXT past anything already marked deleted.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      /* NOTE: delete_insn above is expected to have dropped the use
	 count; zero here means no other user remains.  */
      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if ((lab_next = next_nonnote_insn (lab)) != NULL
	       && GET_CODE (lab_next) == JUMP_INSN
	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Delete every entry label that has no remaining uses.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Back PREV up over deleted insns and notes to find the last real
     predecessor; a BARRIER there means control cannot fall through.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	}
    }

  return next;
}
1878
1879/* Advance from INSN till reaching something not deleted
1880 then return that. May return INSN itself. */
1881
1882rtx
1883next_nondeleted_insn (insn)
1884 rtx insn;
1885{
1886 while (INSN_DELETED_P (insn))
1887 insn = NEXT_INSN (insn);
1888 return insn;
1889}
1890
1891
1892/* Delete a range of insns from FROM to TO, inclusive.
1893 This is for the sake of peephole optimization, so assume
1894 that whatever these insns do will still be done by a new
1895 peephole insn that will replace them. */
1896
1897void
1898delete_for_peephole (from, to)
1899 rtx from, to;
1900{
1901 rtx insn = from;
1902
1903 while (1)
1904 {
1905 rtx next = NEXT_INSN (insn);
1906 rtx prev = PREV_INSN (insn);
1907
1908 if (GET_CODE (insn) != NOTE)
1909 {
1910 INSN_DELETED_P (insn) = 1;
1911
1912 /* Patch this insn out of the chain. */
1913 /* We don't do this all at once, because we
1914 must preserve all NOTEs. */
1915 if (prev)
1916 NEXT_INSN (prev) = next;
1917
1918 if (next)
1919 PREV_INSN (next) = prev;
1920 }
1921
1922 if (insn == to)
1923 break;
1924 insn = next;
1925 }
1926
1927 /* Note that if TO is an unconditional jump
1928 we *do not* delete the BARRIER that follows,
1929 since the peephole that replaces this sequence
1930 is also an unconditional jump in that case. */
1931}
1932
1933
/* We have determined that AVOIDED_INSN is never reached, and are
   about to delete it.  If the insn chain between AVOIDED_INSN and
   FINISH contains more than one line from the current function, and
   contains at least one operation, print a warning if the user asked
   for it.  If FINISH is NULL, look between AVOIDED_INSN and a LABEL.

   CSE and inlining can duplicate insns, so it's possible to get
   spurious warnings from this.  */

void
never_reached_warning (avoided_insn, finish)
     rtx avoided_insn, finish;
{
  rtx insn;
  rtx a_line_note = NULL;
  int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;

  if (!warn_notreached)
    return;

  /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
     us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
     the line note.  */
  insn = avoided_insn;
  while (1)
    {
      rtx prev = PREV_INSN (insn);
      if (prev == NULL_RTX
	  || GET_CODE (prev) != NOTE)
	break;
      insn = prev;
    }

  /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
     in case FINISH is NULL, otherwise until we run out of insns.  */

  for (; insn != NULL; insn = NEXT_INSN (insn))
    {
      if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
	  || GET_CODE (insn) == BARRIER)
	break;

      if (GET_CODE (insn) == NOTE		/* A line number note?  */
	  && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  /* Remember the first line note; flag if a different line
	     number appears later in the dead range.  */
	  if (a_line_note == NULL)
	    a_line_note = insn;
	  else
	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
				  != NOTE_LINE_NUMBER (insn));
	}
      else if (INSN_P (insn))
	{
	  /* Once past FINISH, stop at the first real insn.  */
	  if (reached_end)
	    break;
	  contains_insn = 1;
	}

      if (insn == finish)
	reached_end = 1;
    }
  /* Warn only when the skipped region spans multiple source lines and
     actually contains an operation — avoids noise on empty ranges.  */
  if (two_avoided_lines && contains_insn)
    warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
				NOTE_LINE_NUMBER (a_line_note),
				"will never be executed");
}
2000
2001
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  /* Replace the reference to OLABEL by a reference to NLABEL,
	     or by a RETURN when NLABEL is null.  */
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Turn a RETURN into a jump to NLABEL; at the top level of the
	 pattern, it must be wrapped in a SET of the PC.  */
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* An unconditional jump to OLABEL being redirected to "no label"
     becomes a plain RETURN.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse over all sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
2060
2061/* Similar, but apply the change group and report success or failure. */
2062
2063static int
2064redirect_exp (olabel, nlabel, insn)
2065 rtx olabel, nlabel;
2066 rtx insn;
2067{
2068 rtx *loc;
2069
2070 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2071 loc = &XVECEXP (PATTERN (insn), 0, 0);
2072 else
2073 loc = &PATTERN (insn);
2074
2075 redirect_exp_1 (loc, olabel, nlabel, insn);
2076 if (num_validated_changes () == 0)
2077 return 0;
2078
2079 return apply_change_group ();
2080}
2081
2082/* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2083 the modifications into the change group. Return false if we did
2084 not see how to do that. */
2085
2086int
2087redirect_jump_1 (jump, nlabel)
2088 rtx jump, nlabel;
2089{
2090 int ochanges = num_validated_changes ();
2091 rtx *loc;
2092
2093 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2094 loc = &XVECEXP (PATTERN (jump), 0, 0);
2095 else
2096 loc = &PATTERN (jump);
2097
2098 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2099 return num_validated_changes () > ochanges;
2100}
2101
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  rtx olabel = JUMP_LABEL (jump);

  /* Nothing to do when the target is unchanged.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (olabel, nlabel, jump))
    return 0;

  /* Keep JUMP_LABEL and the label use counts in sync.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && GET_CODE (NEXT_INSN (olabel)) == NOTE
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  return 1;
}
2144
/* Invert the jump condition of rtx X contained in jump insn, INSN.
   Accrue the modifications into the change group.  */

static void
invert_exp_1 (insn)
     rtx insn;
{
  RTX_CODE code;
  rtx x = pc_set (insn);

  /* Only insns with a PC set may be inverted.  */
  if (!x)
    abort ();
  x = SET_SRC (x);

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  /* Queue the reversed comparison in place of COMP.  */
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return;
	}

      /* Fallback: queue a swap of the THEN and ELSE arms.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
    }
  else
    abort ();
}
2191
2192/* Invert the jump condition of conditional jump insn, INSN.
2193
2194 Return 1 if we can do so, 0 if we cannot find a way to do so that
2195 matches a pattern. */
2196
2197static int
2198invert_exp (insn)
2199 rtx insn;
2200{
2201 invert_exp_1 (insn);
2202 if (num_validated_changes () == 0)
2203 return 0;
2204
2205 return apply_change_group ();
2206}
2207
2208/* Invert the condition of the jump JUMP, and make it jump to label
2209 NLABEL instead of where it jumps now. Accrue changes into the
2210 change group. Return false if we didn't see how to perform the
2211 inversion and redirection. */
2212
2213int
2214invert_jump_1 (jump, nlabel)
2215 rtx jump, nlabel;
2216{
2217 int ochanges;
2218
2219 ochanges = num_validated_changes ();
2220 invert_exp_1 (jump);
2221 if (num_validated_changes () == ochanges)
2222 return 0;
2223
2224 return redirect_jump_1 (jump, nlabel);
2225}
2226
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (jump))
    return 0;

  if (redirect_jump (jump, nlabel, delete_unused))
    {
      /* The condition flipped, so the branch probability notes must
	 be flipped too.  */
      invert_br_probabilities (jump);

      return 1;
    }

  /* Redirection failed: undo the inversion so JUMP is unchanged.  */
  if (! invert_exp (jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
2256
2257
2258
2259/* Like rtx_equal_p except that it considers two REGs as equal
2260 if they renumber to the same value and considers two commutative
2261 operations to be the same if the order of the operands has been
2262 reversed.
2263
2264 ??? Addition is not commutative on the PA due to the weird implicit
2265 space register selection rules for memory addresses. Therefore, we
2266 don't consider a + b == b + a.
2267
2268 We could/should make this test a little tighter. Possibly only
2269 disabling it on the PA via some backend macro or only disabling this
2270 case when the PLUS is inside a MEM. */
2271
2272int
2273rtx_renumbered_equal_p (x, y)
2274 rtx x, y;
2275{
2276 int i;
2277 RTX_CODE code = GET_CODE (x);
2278 const char *fmt;
2279
2280 if (x == y)
2281 return 1;
2282
2283 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2284 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2285 && GET_CODE (SUBREG_REG (y)) == REG)))
2286 {
2287 int reg_x = -1, reg_y = -1;
2288 int byte_x = 0, byte_y = 0;
2289
2290 if (GET_MODE (x) != GET_MODE (y))
2291 return 0;
2292
2293 /* If we haven't done any renumbering, don't
2294 make any assumptions. */
2295 if (reg_renumber == 0)
2296 return rtx_equal_p (x, y);
2297
2298 if (code == SUBREG)
2299 {
2300 reg_x = REGNO (SUBREG_REG (x));
2301 byte_x = SUBREG_BYTE (x);
2302
2303 if (reg_renumber[reg_x] >= 0)
2304 {
2305 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2306 GET_MODE (SUBREG_REG (x)),
2307 byte_x,
2308 GET_MODE (x));
2309 byte_x = 0;
2310 }
2311 }
2312 else
2313 {
2314 reg_x = REGNO (x);
2315 if (reg_renumber[reg_x] >= 0)
2316 reg_x = reg_renumber[reg_x];
2317 }
2318
2319 if (GET_CODE (y) == SUBREG)
2320 {
2321 reg_y = REGNO (SUBREG_REG (y));
2322 byte_y = SUBREG_BYTE (y);
2323
2324 if (reg_renumber[reg_y] >= 0)
2325 {
2326 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2327 GET_MODE (SUBREG_REG (y)),
2328 byte_y,
2329 GET_MODE (y));
2330 byte_y = 0;
2331 }
2332 }
2333 else
2334 {
2335 reg_y = REGNO (y);
2336 if (reg_renumber[reg_y] >= 0)
2337 reg_y = reg_renumber[reg_y];
2338 }
2339
2340 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2341 }
2342
2343 /* Now we have disposed of all the cases
2344 in which different rtx codes can match. */
2345 if (code != GET_CODE (y))
2346 return 0;
2347
2348 switch (code)
2349 {
2350 case PC:
2351 case CC0:
2352 case ADDR_VEC:
2353 case ADDR_DIFF_VEC:
2354 return 0;
2355
2356 case CONST_INT:
2357 return INTVAL (x) == INTVAL (y);
2358
2359 case LABEL_REF:
2360 /* We can't assume nonlocal labels have their following insns yet. */
2361 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2362 return XEXP (x, 0) == XEXP (y, 0);
2363
2364 /* Two label-refs are equivalent if they point at labels
2365 in the same position in the instruction stream. */
2366 return (next_real_insn (XEXP (x, 0))
2367 == next_real_insn (XEXP (y, 0)));
2368
2369 case SYMBOL_REF:
2370 return XSTR (x, 0) == XSTR (y, 0);
2371
2372 case CODE_LABEL:
2373 /* If we didn't match EQ equality above, they aren't the same. */
2374 return 0;
2375
2376 default:
2377 break;
2378 }
2379
2380 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2381
2382 if (GET_MODE (x) != GET_MODE (y))
2383 return 0;
2384
2385 /* For commutative operations, the RTX match if the operand match in any
2386 order. Also handle the simple binary and unary cases without a loop.
2387
2388 ??? Don't consider PLUS a commutative operator; see comments above. */
2389 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2390 && code != PLUS)
2391 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2392 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2393 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2394 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2395 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2396 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2397 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2398 else if (GET_RTX_CLASS (code) == '1')
2399 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2400
2401 /* Compare the elements. If any pair of corresponding elements
2402 fail to match, return 0 for the whole things. */
2403
2404 fmt = GET_RTX_FORMAT (code);
2405 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2406 {
2407 int j;
2408 switch (fmt[i])
2409 {
2410 case 'w':
2411 if (XWINT (x, i) != XWINT (y, i))
2412 return 0;
2413 break;
2414
2415 case 'i':
2416 if (XINT (x, i) != XINT (y, i))
2417 return 0;
2418 break;
2419
2420 case 't':
2421 if (XTREE (x, i) != XTREE (y, i))
2422 return 0;
2423 break;
2424
2425 case 's':
2426 if (strcmp (XSTR (x, i), XSTR (y, i)))
2427 return 0;
2428 break;
2429
2430 case 'e':
2431 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2432 return 0;
2433 break;
2434
2435 case 'u':
2436 if (XEXP (x, i) != XEXP (y, i))
2437 return 0;
2438 /* fall through. */
2439 case '0':
2440 break;
2441
2442 case 'E':
2443 if (XVECLEN (x, i) != XVECLEN (y, i))
2444 return 0;
2445 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2446 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2447 return 0;
2448 break;
2449
2450 default:
2451 abort ();
2452 }
2453 }
2454 return 1;
2455}
2456
2457
2458/* If X is a hard register or equivalent to one or a subregister of one,
2459 return the hard register number. If X is a pseudo register that was not
2460 assigned a hard register, return the pseudo register number. Otherwise,
2461 return -1. Any rtx is valid for X. */
2462
2463int
2464true_regnum (x)
2465 rtx x;
2466{
2467 if (GET_CODE (x) == REG)
2468 {
2469 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2470 return reg_renumber[REGNO (x)];
2471 return REGNO (x);
2472 }
2473 if (GET_CODE (x) == SUBREG)
2474 {
2475 int base = true_regnum (SUBREG_REG (x));
2476 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2477 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2478 GET_MODE (SUBREG_REG (x)),
2479 SUBREG_BYTE (x), GET_MODE (x));
2480 }
2481 return -1;
2482}
2483
2484/* Return regno of the register REG and handle subregs too. */
2485unsigned int
2486reg_or_subregno (reg)
2487 rtx reg;
2488{
2489 if (REG_P (reg))
2490 return REGNO (reg);
2491 if (GET_CODE (reg) == SUBREG)
2492 return REGNO (SUBREG_REG (reg));
2493 abort ();
2494}
Note: See TracBrowser for help on using the repository browser.