source: trunk/src/gcc/gcc/jump.c@ 2

Last change on this file since 2 was 2, checked in by bird, 23 years ago

Initial revision

  • Property cvs2svn:cvs-rev set to 1.1
  • Property svn:eol-style set to native
  • Property svn:executable set to *
File size: 63.0 KB
Line 
1/* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22/* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
24 operate with jumps.
25
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
33
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
36
37#include "config.h"
38#include "system.h"
39#include "rtl.h"
40#include "tm_p.h"
41#include "flags.h"
42#include "hard-reg-set.h"
43#include "regs.h"
44#include "insn-config.h"
45#include "insn-attr.h"
46#include "recog.h"
47#include "function.h"
48#include "expr.h"
49#include "real.h"
50#include "except.h"
51#include "toplev.h"
52#include "reload.h"
53#include "predict.h"
54
55/* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57/* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
61
62static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
63static int init_label_info PARAMS ((rtx));
64static void mark_all_labels PARAMS ((rtx));
65static int duplicate_loop_exit_test PARAMS ((rtx));
66static void delete_computation PARAMS ((rtx));
67static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
68static int redirect_exp PARAMS ((rtx, rtx, rtx));
69static void invert_exp_1 PARAMS ((rtx));
70static int invert_exp PARAMS ((rtx));
71static int returnjump_p_1 PARAMS ((rtx *, void *));
72static void delete_prior_computation PARAMS ((rtx, rtx));
73
74
75/* Alternate entry into the jump optimizer. This entry point only rebuilds
76 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
77 instructions. */
78void
79rebuild_jump_labels (f)
80 rtx f;
81{
82 rtx insn;
83 int max_uid = 0;
84
85 max_uid = init_label_info (f) + 1;
86
87 mark_all_labels (f);
88
89 /* Keep track of labels used from static data; we don't track them
90 closely enough to delete them here, so make sure their reference
91 count doesn't drop to zero. */
92
93 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
94 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
95 LABEL_NUSES (XEXP (insn, 0))++;
96}
97
98
99/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
100 non-fallthru insn. This is not generally true, as multiple barriers
101 may have crept in, or the BARRIER may be separated from the last
102 real insn by one or more NOTEs.
103
104 This simple pass moves barriers and removes duplicates so that the
105 old code is happy.
106 */
107void
108cleanup_barriers ()
109{
110 rtx insn, next, prev;
111 for (insn = get_insns (); insn; insn = next)
112 {
113 next = NEXT_INSN (insn);
114 if (GET_CODE (insn) == BARRIER)
115 {
116 prev = prev_nonnote_insn (insn);
117 if (GET_CODE (prev) == BARRIER)
118 delete_barrier (insn);
119 else if (prev != PREV_INSN (insn))
120 reorder_insns (insn, insn, prev);
121 }
122 }
123}
124
125
126/* Return the next insn after INSN that is not a NOTE and is in the loop,
127 i.e. when there is no such INSN before NOTE_INSN_LOOP_END return NULL_RTX.
128 This routine does not look inside SEQUENCEs. */
129
130static rtx
131next_nonnote_insn_in_loop (insn)
132 rtx insn;
133{
134 while (insn)
135 {
136 insn = NEXT_INSN (insn);
137 if (insn == 0 || GET_CODE (insn) != NOTE)
138 break;
139 if (GET_CODE (insn) == NOTE
140 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
141 return NULL_RTX;
142 }
143
144 return insn;
145}
146
147void
148copy_loop_headers (f)
149 rtx f;
150{
151 rtx insn, next;
152 /* Now iterate optimizing jumps until nothing changes over one pass. */
153 for (insn = f; insn; insn = next)
154 {
155 rtx temp, temp1;
156
157 next = NEXT_INSN (insn);
158
159 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
160 jump. Try to optimize by duplicating the loop exit test if so.
161 This is only safe immediately after regscan, because it uses
162 the values of regno_first_uid and regno_last_uid. */
163 if (GET_CODE (insn) == NOTE
164 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
165 && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
166 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
167 {
168 temp = PREV_INSN (insn);
169 if (duplicate_loop_exit_test (insn))
170 {
171 next = NEXT_INSN (temp);
172 }
173 }
174 }
175}
176
177void
178purge_line_number_notes (f)
179 rtx f;
180{
181 rtx last_note = 0;
182 rtx insn;
183 /* Delete extraneous line number notes.
184 Note that two consecutive notes for different lines are not really
185 extraneous. There should be some indication where that line belonged,
186 even if it became empty. */
187
188 for (insn = f; insn; insn = NEXT_INSN (insn))
189 if (GET_CODE (insn) == NOTE)
190 {
191 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
192 /* Any previous line note was for the prologue; gdb wants a new
193 note after the prologue even if it is for the same line. */
194 last_note = NULL_RTX;
195 else if (NOTE_LINE_NUMBER (insn) >= 0)
196 {
197 /* Delete this note if it is identical to previous note. */
198 if (last_note
199 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
200 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
201 {
202 delete_related_insns (insn);
203 continue;
204 }
205
206 last_note = insn;
207 }
208 }
209}
210
211
212/* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
213 notes whose labels don't occur in the insn any more. Returns the
214 largest INSN_UID found. */
215static int
216init_label_info (f)
217 rtx f;
218{
219 int largest_uid = 0;
220 rtx insn;
221
222 for (insn = f; insn; insn = NEXT_INSN (insn))
223 {
224 if (GET_CODE (insn) == CODE_LABEL)
225 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
226 else if (GET_CODE (insn) == JUMP_INSN)
227 JUMP_LABEL (insn) = 0;
228 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
229 {
230 rtx note, next;
231
232 for (note = REG_NOTES (insn); note; note = next)
233 {
234 next = XEXP (note, 1);
235 if (REG_NOTE_KIND (note) == REG_LABEL
236 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
237 remove_note (insn, note);
238 }
239 }
240 if (INSN_UID (insn) > largest_uid)
241 largest_uid = INSN_UID (insn);
242 }
243
244 return largest_uid;
245}
246
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.

   Recurses into the three insn sequences hanging off a CALL_PLACEHOLDER
   pattern, so labels referenced only inside those sequences are counted
   too.  */

static void
mark_all_labels (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	if (GET_CODE (insn) == CALL_INSN
	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	  {
	    /* Process the normal-call, tail-call and tail-recursion
	       sequences of the placeholder.  */
	    mark_all_labels (XEXP (PATTERN (insn), 0));
	    mark_all_labels (XEXP (PATTERN (insn), 1));
	    mark_all_labels (XEXP (PATTERN (insn), 2));

	    /* Canonicalize the tail recursion label attached to the
	       CALL_PLACEHOLDER insn.  */
	    if (XEXP (PATTERN (insn), 3))
	      {
		rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
						   XEXP (PATTERN (insn), 3));
		mark_jump_label (label_ref, insn, 0);
		XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
	      }

	    continue;
	  }

	mark_jump_label (PATTERN (insn), insn, 0);
	if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, 0);
		    /* mark_jump_label may have canonicalized the label;
		       copy the (possibly replaced) label back out.  */
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	  }
      }
}
303
/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before INSN,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after INSN.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0, first_copy = 0;
  int num_insns = 0;
  /* First insn of the exit test: the caller guarantees the insn after
     LOOP_START is an unconditional jump, whose JUMP_LABEL heads the
     exit-test code.  */
  rtx exitcode
    = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;
  rtx loop_pre_header_label;

  /* Scan the exit code.  We do not perform this optimization if any insn:

         is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
	      is not valid.

     We also do not do this if we find an insn with ASM_OPERANDS.  While
     this restriction should not be necessary, copying an insn with
     ASM_OPERANDS can confuse asm_noperands in some cases.

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:
	  /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
	     a jump immediately after the loop start that branches outside
	     the loop but within an outer loop, near the exit test.
	     If we copied this exit test and created a phony
	     NOTE_INSN_LOOP_VTOP, this could make instructions immediately
	     before the exit test look like these could be safely moved
	     out of the loop even if they actually may be never executed.
	     This can be avoided by checking here for NOTE_INSN_LOOP_CONT.  */

	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	    return 0;

	  if (optimize < 2
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    /* If we were to duplicate this code, we would not move
	       the BLOCK notes, and so debugging the moved code would
	       be difficult.  Thus, we only move the code with -O2 or
	       higher.  */
	    return 0;

	  break;
	case JUMP_INSN:
	case INSN:
	  /* The code below would grossly mishandle REG_WAS_0 notes,
	     so get rid of them here.  */
	  while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
	    remove_note (insn, p);
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	default:
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
	/* The register dies inside the exit code iff its last use is
	   before LASTEXIT.  */
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));

	    REG_LOOP_TEST_P (reg) = 1;

	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
	  }
      }
  loop_pre_header_label = gen_label_rtx ();

  /* Now copy each insn of the exit test in front of LOOP_START.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	  copy = emit_barrier_before (loop_start);
	  break;
	case NOTE:
	  /* Only copy line-number notes.  */
	  if (NOTE_LINE_NUMBER (insn) >= 0)
	    {
	      copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	      NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	    }
	  break;

	case INSN:
	  copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	  mark_jump_label (PATTERN (copy), copy, 0);

	  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	     make them.  */
	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) != REG_LABEL)
	      {
		if (GET_CODE (link) == EXPR_LIST)
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
		else
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
	      }

	  if (reg_map && REG_NOTES (copy))
	    replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  break;

	case JUMP_INSN:
	  copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
					loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	  mark_jump_label (PATTERN (copy), copy, 0);
	  if (REG_NOTES (insn))
	    {
	      REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
	      if (reg_map)
		replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	    }

	  /* Predict conditional jump that do make loop looping as taken.
	     Other jumps are probably exit conditions, so predict
	     them as untaken.  */
	  if (any_condjump_p (copy))
	    {
	      rtx label = JUMP_LABEL (copy);
	      if (label)
		{
		  /* The jump_insn after loop_start should be followed
		     by barrier and loopback label.  */
		  if (prev_nonnote_insn (label)
		      && (prev_nonnote_insn (prev_nonnote_insn (label))
			  == next_nonnote_insn (loop_start)))
		    {
		      predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
		      /* To keep pre-header, we need to redirect all loop
			 entrances before the LOOP_BEG note.  */
		      redirect_jump (copy, loop_pre_header_label, 0);
		    }
		  else
		    predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
		}
	    }
	  break;

	default:
	  abort ();
	}

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  */
      if (! first_copy)
	first_copy = copy;
    }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  This may not
	 be strictly necessary since we should have copied at least one
	 insn above.  But I am going to be safe.  */
      if (! first_copy)
	first_copy = copy;

      mark_jump_label (PATTERN (copy), copy, 0);
      emit_barrier_before (loop_start);
    }

  /* The pre-header label separates code jumping into the loop entrance
     from the duplicated exit test above it.  */
  emit_label_before (loop_pre_header_label, loop_start);

  /* Now scan from the first insn we copied to the last insn we copied
     (copy) for new pseudo registers.  Do this after the code to jump to
     the end label since that might create a new pseudo too.  */
  reg_scan_update (first_copy, copy, max_reg);

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  /* Delete the unconditional jump that followed LOOP_START, now that the
     copied test falls through or exits directly.  */
  delete_related_insns (next_nonnote_insn (loop_start));

  /* Clean up.  */
  if (reg_map)
    free (reg_map);

  return 1;
}
559
560
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
   notes between START and END out before START.  START and END may be such
   notes.  Returns the values of the new starting and ending insns, which
   may be different if the original ones were such notes.
   Return true if there were only such notes and no real instructions.  */

bool
squeeze_notes (startp, endp)
     rtx* startp;
     rtx* endp;
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;		/* Last non-note (real) insn seen.  */
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
	{
	  if (insn == start)
	    /* A note at the very front needs no motion; just advance
	       START past it.  */
	    start = next;
	  else
	    {
	      /* Unlink INSN from its current position and re-link it
	         immediately before START, by hand-editing the doubly
	         linked insn chain.  */
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
      else
	last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
618
619
620/* Return the label before INSN, or put a new label there. */
621
622rtx
623get_label_before (insn)
624 rtx insn;
625{
626 rtx label;
627
628 /* Find an existing label at this point
629 or make a new one if there is none. */
630 label = prev_nonnote_insn (insn);
631
632 if (label == 0 || GET_CODE (label) != CODE_LABEL)
633 {
634 rtx prev = PREV_INSN (insn);
635
636 label = gen_label_rtx ();
637 emit_label_after (label, prev);
638 LABEL_NUSES (label) = 0;
639 }
640 return label;
641}
642
643/* Return the label after INSN, or put a new label there. */
644
645rtx
646get_label_after (insn)
647 rtx insn;
648{
649 rtx label;
650
651 /* Find an existing label at this point
652 or make a new one if there is none. */
653 label = next_nonnote_insn (insn);
654
655 if (label == 0 || GET_CODE (label) != CODE_LABEL)
656 {
657 label = gen_label_rtx ();
658 emit_label_after (label, insn);
659 LABEL_NUSES (label) = 0;
660 }
661 return label;
662}
663
664
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (code, arg0, arg1, insn)
     rtx insn, arg0, arg1;
     enum rtx_code code;
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != '<')
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supply us way to reverse the comparison.
     Give it priority over everything else to allow machine description to do
     tricks.  */
#ifdef REVERSIBLE_CC_MODE
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }
#endif

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  /* In case we give up IEEE compatibility, all comparisons are reversible.  */
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      || flag_unsafe_math_optimizations)
    return reverse_condition (code);

  if (GET_MODE_CLASS (mode) == MODE_CC
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
	 This code is expensive, but with sane machine description it
	 will be never used, since REVERSIBLE_CC_MODE will return true
	 in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* Walk backwards within the basic block (stop at a CODE_LABEL)
	 looking for the insn that set ARG0.  */
      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && GET_CODE (prev) != CODE_LABEL;
	   prev = prev_nonnote_insn (prev))
	{
	  rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  /* Found the COMPARE; take the mode of its operands.  */
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
		 of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some un-understandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* An integer condition.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && ! FLOAT_MODE_P (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
792
793/* An wrapper around the previous function to take COMPARISON as rtx
794 expression. This simplifies many callers. */
795enum rtx_code
796reversed_comparison_code (comparison, insn)
797 rtx comparison, insn;
798{
799 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
800 return UNKNOWN;
801 return reversed_comparison_code_parts (GET_CODE (comparison),
802 XEXP (comparison, 0),
803 XEXP (comparison, 1), insn);
804}
805
806
807/* Given an rtx-code for a comparison, return the code for the negated
808 comparison. If no such code exists, return UNKNOWN.
809
810 WATCH OUT! reverse_condition is not safe to use on a jump that might
811 be acting on the results of an IEEE floating point comparison, because
812 of the special treatment of non-signaling nans in comparisons.
813 Use reversed_comparison_code instead. */
814
815enum rtx_code
816reverse_condition (code)
817 enum rtx_code code;
818{
819 switch (code)
820 {
821 case EQ:
822 return NE;
823 case NE:
824 return EQ;
825 case GT:
826 return LE;
827 case GE:
828 return LT;
829 case LT:
830 return GE;
831 case LE:
832 return GT;
833 case GTU:
834 return LEU;
835 case GEU:
836 return LTU;
837 case LTU:
838 return GEU;
839 case LEU:
840 return GTU;
841 case UNORDERED:
842 return ORDERED;
843 case ORDERED:
844 return UNORDERED;
845
846 case UNLT:
847 case UNLE:
848 case UNGT:
849 case UNGE:
850 case UNEQ:
851 case LTGT:
852 return UNKNOWN;
853
854 default:
855 abort ();
856 }
857}
858
859/* Similar, but we're allowed to generate unordered comparisons, which
860 makes it safe for IEEE floating-point. Of course, we have to recognize
861 that the target will support them too... */
862
863enum rtx_code
864reverse_condition_maybe_unordered (code)
865 enum rtx_code code;
866{
867 /* Non-IEEE formats don't have unordered conditions. */
868 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
869 return reverse_condition (code);
870
871 switch (code)
872 {
873 case EQ:
874 return NE;
875 case NE:
876 return EQ;
877 case GT:
878 return UNLE;
879 case GE:
880 return UNLT;
881 case LT:
882 return UNGE;
883 case LE:
884 return UNGT;
885 case LTGT:
886 return UNEQ;
887 case UNORDERED:
888 return ORDERED;
889 case ORDERED:
890 return UNORDERED;
891 case UNLT:
892 return GE;
893 case UNLE:
894 return GT;
895 case UNGT:
896 return LE;
897 case UNGE:
898 return LT;
899 case UNEQ:
900 return LTGT;
901
902 default:
903 abort ();
904 }
905}
906
907/* Similar, but return the code when two operands of a comparison are swapped.
908 This IS safe for IEEE floating-point. */
909
910enum rtx_code
911swap_condition (code)
912 enum rtx_code code;
913{
914 switch (code)
915 {
916 case EQ:
917 case NE:
918 case UNORDERED:
919 case ORDERED:
920 case UNEQ:
921 case LTGT:
922 return code;
923
924 case GT:
925 return LT;
926 case GE:
927 return LE;
928 case LT:
929 return GT;
930 case LE:
931 return GE;
932 case GTU:
933 return LTU;
934 case GEU:
935 return LEU;
936 case LTU:
937 return GTU;
938 case LEU:
939 return GEU;
940 case UNLT:
941 return UNGT;
942 case UNLE:
943 return UNGE;
944 case UNGT:
945 return UNLT;
946 case UNGE:
947 return UNLE;
948
949 default:
950 abort ();
951 }
952}
953
954/* Given a comparison CODE, return the corresponding unsigned comparison.
955 If CODE is an equality comparison or already an unsigned comparison,
956 CODE is returned. */
957
958enum rtx_code
959unsigned_condition (code)
960 enum rtx_code code;
961{
962 switch (code)
963 {
964 case EQ:
965 case NE:
966 case GTU:
967 case GEU:
968 case LTU:
969 case LEU:
970 return code;
971
972 case GT:
973 return GTU;
974 case GE:
975 return GEU;
976 case LT:
977 return LTU;
978 case LE:
979 return LEU;
980
981 default:
982 abort ();
983 }
984}
985
986/* Similarly, return the signed version of a comparison. */
987
988enum rtx_code
989signed_condition (code)
990 enum rtx_code code;
991{
992 switch (code)
993 {
994 case EQ:
995 case NE:
996 case GT:
997 case GE:
998 case LT:
999 case LE:
1000 return code;
1001
1002 case GTU:
1003 return GT;
1004 case GEU:
1005 return GE;
1006 case LTU:
1007 return LT;
1008 case LEU:
1009 return LE;
1010
1011 default:
1012 abort ();
1013 }
1014}
1015
1016
1017/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
1018 truth of CODE1 implies the truth of CODE2. */
1019
1020int
1021comparison_dominates_p (code1, code2)
1022 enum rtx_code code1, code2;
1023{
1024 /* UNKNOWN comparison codes can happen as a result of trying to revert
1025 comparison codes.
1026 They can't match anything, so we have to reject them here. */
1027 if (code1 == UNKNOWN || code2 == UNKNOWN)
1028 return 0;
1029
1030 if (code1 == code2)
1031 return 1;
1032
1033 switch (code1)
1034 {
1035 case UNEQ:
1036 if (code2 == UNLE || code2 == UNGE)
1037 return 1;
1038 break;
1039
1040 case EQ:
1041 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1042 || code2 == ORDERED)
1043 return 1;
1044 break;
1045
1046 case UNLT:
1047 if (code2 == UNLE || code2 == NE)
1048 return 1;
1049 break;
1050
1051 case LT:
1052 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1053 return 1;
1054 break;
1055
1056 case UNGT:
1057 if (code2 == UNGE || code2 == NE)
1058 return 1;
1059 break;
1060
1061 case GT:
1062 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1063 return 1;
1064 break;
1065
1066 case GE:
1067 case LE:
1068 if (code2 == ORDERED)
1069 return 1;
1070 break;
1071
1072 case LTGT:
1073 if (code2 == NE || code2 == ORDERED)
1074 return 1;
1075 break;
1076
1077 case LTU:
1078 if (code2 == LEU || code2 == NE)
1079 return 1;
1080 break;
1081
1082 case GTU:
1083 if (code2 == GEU || code2 == NE)
1084 return 1;
1085 break;
1086
1087 case UNORDERED:
1088 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1089 || code2 == UNGE || code2 == UNGT)
1090 return 1;
1091 break;
1092
1093 default:
1094 break;
1095 }
1096
1097 return 0;
1098}
1099
1100
1101/* Return 1 if INSN is an unconditional jump and nothing else. */
1102
1103int
1104simplejump_p (insn)
1105 rtx insn;
1106{
1107 return (GET_CODE (insn) == JUMP_INSN
1108 && GET_CODE (PATTERN (insn)) == SET
1109 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1110 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1111}
1112/* Return 1 if INSN is an tablejump. */
1113
1114int
1115tablejump_p (insn)
1116 rtx insn;
1117{
1118 rtx table;
1119 return (GET_CODE (insn) == JUMP_INSN
1120 && JUMP_LABEL (insn)
1121 && NEXT_INSN (JUMP_LABEL (insn))
1122 && (table = next_active_insn (JUMP_LABEL (insn)))
1123 && GET_CODE (table) == JUMP_INSN
1124 && (GET_CODE (PATTERN (table)) == ADDR_VEC
1125 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC));
1126}
1127
1128/* Return nonzero if INSN is a (possibly) conditional jump
1129 and nothing more.
1130
1131 Use this function is deprecated, since we need to support combined
1132 branch and compare insns. Use any_condjump_p instead whenever possible. */
1133
1134int
1135condjump_p (insn)
1136 rtx insn;
1137{
1138 rtx x = PATTERN (insn);
1139
1140 if (GET_CODE (x) != SET
1141 || GET_CODE (SET_DEST (x)) != PC)
1142 return 0;
1143
1144 x = SET_SRC (x);
1145 if (GET_CODE (x) == LABEL_REF)
1146 return 1;
1147 else
1148 return (GET_CODE (x) == IF_THEN_ELSE
1149 && ((GET_CODE (XEXP (x, 2)) == PC
1150 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1151 || GET_CODE (XEXP (x, 1)) == RETURN))
1152 || (GET_CODE (XEXP (x, 1)) == PC
1153 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1154 || GET_CODE (XEXP (x, 2)) == RETURN))));
1155
1156 return 0;
1157}
1158
1159/* Return nonzero if INSN is a (possibly) conditional jump inside a
1160 PARALLEL.
1161
1162 Use this function is deprecated, since we need to support combined
1163 branch and compare insns. Use any_condjump_p instead whenever possible. */
1164
1165int
1166condjump_in_parallel_p (insn)
1167 rtx insn;
1168{
1169 rtx x = PATTERN (insn);
1170
1171 if (GET_CODE (x) != PARALLEL)
1172 return 0;
1173 else
1174 x = XVECEXP (x, 0, 0);
1175
1176 if (GET_CODE (x) != SET)
1177 return 0;
1178 if (GET_CODE (SET_DEST (x)) != PC)
1179 return 0;
1180 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1181 return 1;
1182 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1183 return 0;
1184 if (XEXP (SET_SRC (x), 2) == pc_rtx
1185 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1186 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1187 return 1;
1188 if (XEXP (SET_SRC (x), 1) == pc_rtx
1189 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1190 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1191 return 1;
1192 return 0;
1193}
1194
1195/* Return set of PC, otherwise NULL. */
1196
1197rtx
1198pc_set (insn)
1199 rtx insn;
1200{
1201 rtx pat;
1202 if (GET_CODE (insn) != JUMP_INSN)
1203 return NULL_RTX;
1204 pat = PATTERN (insn);
1205
1206 /* The set is allowed to appear either as the insn pattern or
1207 the first set in a PARALLEL. */
1208 if (GET_CODE (pat) == PARALLEL)
1209 pat = XVECEXP (pat, 0, 0);
1210 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1211 return pat;
1212
1213 return NULL_RTX;
1214}
1215
1216/* Return true when insn is an unconditional direct jump,
1217 possibly bundled inside a PARALLEL. */
1218
1219int
1220any_uncondjump_p (insn)
1221 rtx insn;
1222{
1223 rtx x = pc_set (insn);
1224 if (!x)
1225 return 0;
1226 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1227 return 0;
1228 return 1;
1229}
1230
1231/* Return true when insn is a conditional jump. This function works for
1232 instructions containing PC sets in PARALLELs. The instruction may have
1233 various other effects so before removing the jump you must verify
1234 onlyjump_p.
1235
1236 Note that unlike condjump_p it returns false for unconditional jumps. */
1237
1238int
1239any_condjump_p (insn)
1240 rtx insn;
1241{
1242 rtx x = pc_set (insn);
1243 enum rtx_code a, b;
1244
1245 if (!x)
1246 return 0;
1247 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1248 return 0;
1249
1250 a = GET_CODE (XEXP (SET_SRC (x), 1));
1251 b = GET_CODE (XEXP (SET_SRC (x), 2));
1252
1253 return ((b == PC && (a == LABEL_REF || a == RETURN))
1254 || (a == PC && (b == LABEL_REF || b == RETURN)));
1255}
1256
1257/* Return the label of a conditional jump. */
1258
1259rtx
1260condjump_label (insn)
1261 rtx insn;
1262{
1263 rtx x = pc_set (insn);
1264
1265 if (!x)
1266 return NULL_RTX;
1267 x = SET_SRC (x);
1268 if (GET_CODE (x) == LABEL_REF)
1269 return x;
1270 if (GET_CODE (x) != IF_THEN_ELSE)
1271 return NULL_RTX;
1272 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1273 return XEXP (x, 1);
1274 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1275 return XEXP (x, 2);
1276 return NULL_RTX;
1277}
1278
1279/* Return true if INSN is a (possibly conditional) return insn. */
1280
1281static int
1282returnjump_p_1 (loc, data)
1283 rtx *loc;
1284 void *data ATTRIBUTE_UNUSED;
1285{
1286 rtx x = *loc;
1287
1288 return x && (GET_CODE (x) == RETURN
1289 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1290}
1291
1292int
1293returnjump_p (insn)
1294 rtx insn;
1295{
1296 if (GET_CODE (insn) != JUMP_INSN)
1297 return 0;
1298 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1299}
1300
1301/* Return true if INSN is a jump that only transfers control and
1302 nothing more. */
1303
1304int
1305onlyjump_p (insn)
1306 rtx insn;
1307{
1308 rtx set;
1309
1310 if (GET_CODE (insn) != JUMP_INSN)
1311 return 0;
1312
1313 set = single_set (insn);
1314 if (set == NULL)
1315 return 0;
1316 if (GET_CODE (SET_DEST (set)) != PC)
1317 return 0;
1318 if (side_effects_p (SET_SRC (set)))
1319 return 0;
1320
1321 return 1;
1322}
1323
1324#ifdef HAVE_cc0
1325
1326/* Return non-zero if X is an RTX that only sets the condition codes
1327 and has no side effects. */
1328
1329int
1330only_sets_cc0_p (x)
1331 rtx x;
1332{
1333
1334 if (! x)
1335 return 0;
1336
1337 if (INSN_P (x))
1338 x = PATTERN (x);
1339
1340 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1341}
1342
1343/* Return 1 if X is an RTX that does nothing but set the condition codes
1344 and CLOBBER or USE registers.
1345 Return -1 if X does explicitly set the condition codes,
1346 but also does other things. */
1347
1348int
1349sets_cc0_p (x)
1350 rtx x;
1351{
1352
1353 if (! x)
1354 return 0;
1355
1356 if (INSN_P (x))
1357 x = PATTERN (x);
1358
1359 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1360 return 1;
1361 if (GET_CODE (x) == PARALLEL)
1362 {
1363 int i;
1364 int sets_cc0 = 0;
1365 int other_things = 0;
1366 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1367 {
1368 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1369 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1370 sets_cc0 = 1;
1371 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1372 other_things = 1;
1373 }
1374 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1375 }
1376 return 0;
1377}
1378#endif
1379
1380
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  /* Walk at most 10 hops.  Each step requires that the first active insn
     after VALUE be a simple unconditional jump (or a RETURN) followed by
     a BARRIER; anything else ends the chain.  Note the condition assigns
     INSN and NEXT as side effects.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Stop before a dispatch table; don't chain into a tablejump.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth cap may mean an undetected loop; play it safe.  */
  if (depth == 10)
    return label;
  return value;
}
1439
1440
1441
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (x, insn, in_mem)
     rtx x;
     rtx insn;
     int in_mem;
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes can never contain a label reference; nothing to do.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* Remember we are inside a MEM so a SYMBOL_REF below may be a
	 constant-pool reference worth following.  */
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (GET_CODE (label) == NOTE
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	/* Don't count uses from insns that have already been deleted.  */
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  /* Generic case: recurse over all rtx operands looking for labels.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
1559
1560/* If all INSN does is set the pc, delete it,
1561 and delete the insn that set the condition codes for it
1562 if that's what the previous thing was. */
1563
1564void
1565delete_jump (insn)
1566 rtx insn;
1567{
1568 rtx set = single_set (insn);
1569
1570 if (set && GET_CODE (SET_DEST (set)) == PC)
1571 delete_computation (insn);
1572}
1573
1574/* Verify INSN is a BARRIER and delete it. */
1575
1576void
1577delete_barrier (insn)
1578 rtx insn;
1579{
1580 if (GET_CODE (insn) != BARRIER)
1581 abort ();
1582
1583 delete_insn (insn);
1584}
1585
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (note, insn)
     rtx note;
     rtx insn;
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  /* Scan backwards over ordinary insns and calls; any other kind of
     insn (jump, label, barrier) ends the search.  */
  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
		    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && GET_CODE (XEXP (pat, 0)) == INSN)
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  /* A setter with side effects (other than a call we already
	     vetted above) can't be removed.  */
	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && GET_CODE (SET_DEST (pat)) == REG)
	    {
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (dest_regno,
					  GET_MODE (SET_DEST (pat))) : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));

	      /* Delete the setter only if every hard register it sets
		 is covered by the register that dies here.  */
	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  /* Once every word is marked REG_UNUSED, the whole
		     setter is dead and can go too.  */
		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
1702
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* Walk the REG_DEAD notes; NEXT is captured up front because
     delete_prior_computation may splice NOTE onto another insn.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
1760
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (insn)
     rtx insn;
{
  int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      /* Note: delete_insn above already decremented LABEL_NUSES.  */
      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if ((lab_next = next_nonnote_insn (lab)) != NULL
	       && GET_CODE (lab_next) == JUMP_INSN
	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Delete every target label of the table that is now unused.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Back up over deleted insns and notes to find the live predecessor.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	}
    }

  return next;
}
1896
1897/* Advance from INSN till reaching something not deleted
1898 then return that. May return INSN itself. */
1899
1900rtx
1901next_nondeleted_insn (insn)
1902 rtx insn;
1903{
1904 while (INSN_DELETED_P (insn))
1905 insn = NEXT_INSN (insn);
1906 return insn;
1907}
1908
1909
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (from, to)
     rtx from, to;
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      /* NOTEs are kept in the chain; everything else is marked deleted
	 and unlinked.  */
      if (GET_CODE (insn) != NOTE)
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
1950
1951
/* We have determined that AVOIDED_INSN is never reached, and are about to
   delete it.  Print a warning if the user asked for one.

   To try to make this warning more useful, this should only be called
   once per basic block not reached, and it only warns when the basic
   block contains more than one line from the current function, and
   contains at least one operation.  CSE and inlining can duplicate insns,
   so it's possible to get spurious warnings from this.

   FINISH, if non-NULL, bounds the scan; insns past it don't count
   toward CONTAINS_INSN.  */

void
never_reached_warning (avoided_insn, finish)
     rtx avoided_insn, finish;
{
  rtx insn;
  rtx a_line_note = NULL;
  int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;

  if (! warn_notreached)
    return;

  /* Scan forwards, looking at LINE_NUMBER notes, until
     we hit a LABEL or we run out of insns.  */

  for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
    {
      if (finish == NULL && GET_CODE (insn) == CODE_LABEL)
	break;

      if (GET_CODE (insn) == NOTE		/* A line number note?  */
	  && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  /* Remember the first line note; flag when a second, different
	     line is seen.  */
	  if (a_line_note == NULL)
	    a_line_note = insn;
	  else
	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
				  != NOTE_LINE_NUMBER (insn));
	}
      else if (INSN_P (insn))
	{
	  if (reached_end)
	    break;
	  contains_insn = 1;
	}

      if (insn == finish)
	reached_end = 1;
    }
  if (two_avoided_lines && contains_insn)
    warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
				NOTE_LINE_NUMBER (a_line_note),
				"will never be executed");
}
2004
2005
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  /* Replace the reference with NLABEL, or with a RETURN when
	     NLABEL is null.  */
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Turn a return into a jump to NLABEL; wrap it in a full pc set
	 if it stands as the whole pattern.  */
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* Collapse (set (pc) (label_ref OLABEL)) into a plain RETURN when
     retargeting to "return".  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse over all operands of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
2064
2065/* Similar, but apply the change group and report success or failure. */
2066
2067static int
2068redirect_exp (olabel, nlabel, insn)
2069 rtx olabel, nlabel;
2070 rtx insn;
2071{
2072 rtx *loc;
2073
2074 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2075 loc = &XVECEXP (PATTERN (insn), 0, 0);
2076 else
2077 loc = &PATTERN (insn);
2078
2079 redirect_exp_1 (loc, olabel, nlabel, insn);
2080 if (num_validated_changes () == 0)
2081 return 0;
2082
2083 return apply_change_group ();
2084}
2085
2086/* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2087 the modifications into the change group. Return false if we did
2088 not see how to do that. */
2089
2090int
2091redirect_jump_1 (jump, nlabel)
2092 rtx jump, nlabel;
2093{
2094 int ochanges = num_validated_changes ();
2095 rtx *loc;
2096
2097 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2098 loc = &XVECEXP (PATTERN (jump), 0, 0);
2099 else
2100 loc = &PATTERN (jump);
2101
2102 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2103 return num_validated_changes () > ochanges;
2104}
2105
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  rtx olabel = JUMP_LABEL (jump);

  /* Nothing to do when the target is unchanged.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (olabel, nlabel, jump))
    return 0;

  /* Keep the label use counts in sync with the new target.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && GET_CODE (NEXT_INSN (olabel)) == NOTE
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  return 1;
}
2148
/* Invert the jump condition of rtx X contained in jump insn, INSN.
   Accrue the modifications into the change group.  Aborts if INSN
   has no pc set or the set source is not an IF_THEN_ELSE.  */

static void
invert_exp_1 (insn)
     rtx insn;
{
  RTX_CODE code;
  rtx x = pc_set (insn);

  if (!x)
    abort ();
  x = SET_SRC (x);

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return;
	}

      /* Fall back: swap the two arms of the IF_THEN_ELSE.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
    }
  else
    abort ();
}
2195
2196/* Invert the jump condition of conditional jump insn, INSN.
2197
2198 Return 1 if we can do so, 0 if we cannot find a way to do so that
2199 matches a pattern. */
2200
2201static int
2202invert_exp (insn)
2203 rtx insn;
2204{
2205 invert_exp_1 (insn);
2206 if (num_validated_changes () == 0)
2207 return 0;
2208
2209 return apply_change_group ();
2210}
2211
2212/* Invert the condition of the jump JUMP, and make it jump to label
2213 NLABEL instead of where it jumps now. Accrue changes into the
2214 change group. Return false if we didn't see how to perform the
2215 inversion and redirection. */
2216
2217int
2218invert_jump_1 (jump, nlabel)
2219 rtx jump, nlabel;
2220{
2221 int ochanges;
2222
2223 ochanges = num_validated_changes ();
2224 invert_exp_1 (jump);
2225 if (num_validated_changes () == ochanges)
2226 return 0;
2227
2228 return redirect_jump_1 (jump, nlabel);
2229}
2230
2231/* Invert the condition of the jump JUMP, and make it jump to label
2232 NLABEL instead of where it jumps now. Return true if successful. */
2233
2234int
2235invert_jump (jump, nlabel, delete_unused)
2236 rtx jump, nlabel;
2237 int delete_unused;
2238{
2239 /* We have to either invert the condition and change the label or
2240 do neither. Either operation could fail. We first try to invert
2241 the jump. If that succeeds, we try changing the label. If that fails,
2242 we invert the jump back to what it was. */
2243
2244 if (! invert_exp (jump))
2245 return 0;
2246
2247 if (redirect_jump (jump, nlabel, delete_unused))
2248 {
2249 invert_br_probabilities (jump);
2250
2251 return 1;
2252 }
2253
2254 if (! invert_exp (jump))
2255 /* This should just be putting it back the way it was. */
2256 abort ();
2257
2258 return 0;
2259}
2260
2261
2262
2263/* Like rtx_equal_p except that it considers two REGs as equal
2264 if they renumber to the same value and considers two commutative
2265 operations to be the same if the order of the operands has been
2266 reversed.
2267
2268 ??? Addition is not commutative on the PA due to the weird implicit
2269 space register selection rules for memory addresses. Therefore, we
2270 don't consider a + b == b + a.
2271
2272 We could/should make this test a little tighter. Possibly only
2273 disabling it on the PA via some backend macro or only disabling this
2274 case when the PLUS is inside a MEM. */
2275
/* Return 1 if X and Y are identical-looking rtx's, treating two
   (hard or renumbered) registers as equal when reg_renumber maps
   them to the same hard register.  Commutative operators (except
   PLUS -- see the comment preceding this function) match with their
   operands in either order.  */

int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Fast path: both X and Y are a REG or a SUBREG of a REG.  Reduce
     each side to a (hard register number, subreg byte) pair and
     compare those.  */
  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
				  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  /* If the inner pseudo got a hard register, fold the SUBREG
	     into a plain hard register number and clear the byte
	     offset, so that (subreg (reg P)) and the equivalent hard
	     REG compare equal below.  */
	  if (reg_renumber[reg_x] >= 0)
	    {
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x));
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y));
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  /* Codes with special equality rules.  */
  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These only match when X == Y, which was checked above.  */
      return 0;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      /* Symbol names are shared, so pointer equality suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.

     ??? Don't consider PLUS a commutative operator; see comments above.  */
  if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
      && code != PLUS)
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.
     Dispatch on the rtx format character of each slot.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  /* HOST_WIDE_INT field.  */
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  /* Plain int field.  */
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  /* Tree field -- compare by identity.  */
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  /* String field -- compare contents.  */
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  /* Sub-expression -- recurse.  */
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* Insn reference -- compare by identity, then skip like '0'.  */
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* fall through.  */
	case '0':
	  break;

	case 'E':
	  /* Vector of expressions -- lengths and each element must match.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
2460
2461
2462/* If X is a hard register or equivalent to one or a subregister of one,
2463 return the hard register number. If X is a pseudo register that was not
2464 assigned a hard register, return the pseudo register number. Otherwise,
2465 return -1. Any rtx is valid for X. */
2466
2467int
2468true_regnum (x)
2469 rtx x;
2470{
2471 if (GET_CODE (x) == REG)
2472 {
2473 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2474 return reg_renumber[REGNO (x)];
2475 return REGNO (x);
2476 }
2477 if (GET_CODE (x) == SUBREG)
2478 {
2479 int base = true_regnum (SUBREG_REG (x));
2480 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2481 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2482 GET_MODE (SUBREG_REG (x)),
2483 SUBREG_BYTE (x), GET_MODE (x));
2484 }
2485 return -1;
2486}
2487
2488/* Return regno of the register REG and handle subregs too. */
2489unsigned int
2490reg_or_subregno (reg)
2491 rtx reg;
2492{
2493 if (REG_P (reg))
2494 return REGNO (reg);
2495 if (GET_CODE (reg) == SUBREG)
2496 return REGNO (SUBREG_REG (reg));
2497 abort ();
2498}
Note: See TracBrowser for help on using the repository browser.