source: trunk/src/gcc/gcc/recog.c@1392

Last change on this file since 1392 was 1392, checked in by bird, 21 years ago

This commit was generated by cvs2svn to compensate for changes in r1391,
which included commits to RCS files with non-trunk default branches.

  • Property cvs2svn:cvs-rev set to 1.1.1.2
  • Property svn:eol-style set to native
  • Property svn:executable set to *
File size: 92.2 KB
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
static void validate_replace_src_1 PARAMS ((rtx *, void *));
static rtx split_insn PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this. */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints. */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied. */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)). */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function. */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one. (The only exception is in combine.c.) */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate. */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things. */
  if (reload_completed)
    {
      /* ??? Doh! We've not got the wrapping insn. Cook one up. */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines. */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT. LOC is the location in the rtl
   at which NEW will be placed. If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported: If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters. If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group. In that case, the changes will be stored. The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change. Try to recognize the insn
   or validate the memory reference with the change applied. If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1. */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change. */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns. */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes =
        (change_t*) xrealloc (changes,
                              sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn. Save old code in
         case invalid. */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1. Otherwise, validate the
     change group we made. */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
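
/* Illustrative sketch, not part of the original file: a typical grouped use
   of the change API documented above. A caller queues several tentative
   edits with IN_GROUP nonzero and then lets apply_change_group accept or
   roll back all of them atomically. INSN, NEW_SRC and NEW_DEST below are
   hypothetical placeholders.

     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
     if (! apply_change_group ())
       ;  -- both edits were undone and INSN is unchanged --  */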

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized. */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers. */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail. Likewise if
     this is not an asm and the insn wasn't recognized. */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them. */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied. */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet. */
int
num_changes_pending ()
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise. */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized. In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn. */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it. */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end. If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn). But don't do this if we
             have an ASM_OPERAND. */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern. Then consider this change
                 as having succeeded. The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur. */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized. */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group. */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up. */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes. Do this in the opposite order in which
     they were made. */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
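
/* Illustrative sketch, not from the original source: num_validated_changes
   and cancel_changes together support checkpointing inside a group, e.g.

     int checkpoint = num_validated_changes ();
     -- queue further edits with validate_change (..., 1) --
     if (the extra edits turn out to be unwanted)
       cancel_changes (checkpoint);

   Everything queued after the checkpoint is backed out in reverse order,
   while the earlier queued changes remain pending. */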

/* Replace every occurrence of FROM in X with TO. Mark each change with
   validate_change passing OBJECT. */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode. Avoid calling rtx_equal_p unless the
     operands look similar. */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements. */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do. */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode. This is
     used by regmove to change the mode of a pseudo register. */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent. Don't do any other
     simplifications, as it is not our job. */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function. */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized. */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified. */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect. */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases). */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something. */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset. */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO. After all changes have been made, validate by seeing
   if INSN is still valid. */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO. After all
   changes have been made, validate by seeing if INSN is still valid. */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
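
/* Illustrative sketch, not part of the original file: a typical caller
   substitutes one register for another throughout an insn and keeps the
   edit only if the insn is still recognizable. FROM_REG and TO_REG are
   hypothetical pseudo registers.

     if (validate_replace_rtx (from_reg, to_reg, insn))
       -- the substitution was applied and INSN re-recognized --
     else
       -- INSN was left unchanged --  */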

/* Try replacing every occurrence of FROM in INSN with TO. */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions. */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring. */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs. */

void
validate_replace_src_group (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid. */
int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}


#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count. */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice. */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
          must be followed immediately by the use of them. */
/* Return 1 if the CC value set up by INSN is not used. */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif


/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0. It returns a
   pointer to the innermost rtx expression containing DEST. Appearances of
   DEST that are being used to totally replace it are not counted. */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source. Otherwise, we
         need just check the source. */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code. Look for a unique usage of DEST. */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage. */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}


/* See if DEST, produced in INSN, is used only a single time in the
   sequel. If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST. */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
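
/* Illustrative sketch, not from the original source: a combine-style caller
   might use find_single_use to decide whether the value set by INSN can be
   substituted into its lone consumer.

     rtx use_insn;
     rtx *usep = find_single_use (dest, insn, &use_insn);
     if (usep)
       -- *usep is the innermost expression in USE_INSN that reads DEST --  */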


/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant. In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'. */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating. */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode. */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs. */
      if (GET_CODE (sub) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference. We should simplify all valid
         subregs of MEM anyway. But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge. */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (sub) == MEM)
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
         create such rtl, and we must reject it. */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand. */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus. */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1. */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
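
/* Illustrative example, not part of the original file: the predicates in
   this file are referenced by name from machine descriptions. A typical
   .md operand such as

     (match_operand:SI 0 "general_operand" "g")

   causes the generated recognizer to call general_operand (op, SImode)
   when matching operand 0. */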


/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'. The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions. Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them. */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.) */
      if (! reload_completed && GET_CODE (sub) == MEM)
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (GET_CODE (sub) == REG
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
         create such rtl, and we must reject it. */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM. */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand. */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode. */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register. */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating. */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1. It seems a safe assumption that this is
     in range for everyone. */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT. */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number. */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating. */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand. */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE. */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating. */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.) */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand. */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
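
/* Illustrative example, not part of the original file: on a machine where
   the stack grows downward and PUSH_ROUNDING does not widen the slot,
   push_operand accepts a memory reference of the shape

     (mem:SI (pre_dec:SI (reg:SI sp)))

   i.e. STACK_PUSH_CODE applied to the stack pointer. */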

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE. */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register. */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand. */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand. */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator. This allows the use of
   MATCH_OPERATOR to recognize all the branch insns. */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}


/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1. */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands. */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs. */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands. */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked). */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together. */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...]. */
          int i;

          /* Make sure all the other parallel things really are clobbers. */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
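
/* Illustrative examples, not part of the original file, of the asm body
   shapes classified above, for a hypothetical asm statement:

     (asm_operands ...)                                  no outputs
     (set (reg:SI 100) (asm_operands ...))               one output
     (parallel [(set ...) ... (clobber (reg ...)) ...])  several outputs
                                                         and/or clobbers  */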

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info. */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself. */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....). */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector. */
      /* Constraints for inputs are in the 2nd element vector. */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs. */

      /* At least one output, plus some CLOBBERs. */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself. */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs. */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload. */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
        {
        case '=':
        case '+':
        case '*':
        case '%':
        case '?':
        case '!':
        case '#':
        case '&':
        case ',':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't. Indicate that results are
             inconclusive. */
          while (ISDIGIT (*constraint))
            constraint++;
          result = -1;
          break;

        case 'p':
          if (address_operand (op, VOIDmode))
            return 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            return 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            return 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created. Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload. */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            return 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            return 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            return 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
            return 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
            return 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* FALLTHRU */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            return 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            return 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
            return 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
            return 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
            return 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
            return 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
            return 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
            return 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
            return 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
            return 1;
          break;

        case 'X':
          return 1;

        case 'g':
          if (general_operand (op, VOIDmode))
            return 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT. */
          if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                return 1;
            }
#ifdef EXTRA_CONSTRAINT
          if (EXTRA_CONSTRAINT (op, c))
            return 1;
          if (EXTRA_MEMORY_CONSTRAINT (c))
            {
              /* Every memory operand can be reloaded to fit. */
              if (memory_operand (op, VOIDmode))
                return 1;
            }
          if (EXTRA_ADDRESS_CONSTRAINT (c))
            {
              /* Every address operand can be reloaded to fit. */
              if (address_operand (op, VOIDmode))
                return 1;
            }
#endif
          break;
        }
    }

  return result;
}
1878
1879
1880/* Given an rtx *P, if it is a sum containing an integer constant term,
1881 return the location (type rtx *) of the pointer to that constant term.
1882 Otherwise, return a null pointer. */
1883
1884rtx *
1885find_constant_term_loc (p)
1886 rtx *p;
1887{
1888 rtx *tem;
1889 enum rtx_code code = GET_CODE (*p);
1890
1891 /* If *P IS such a constant term, P is its location. */
1892
1893 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1894 || code == CONST)
1895 return p;
1896
1897 /* Otherwise, if not a sum, it has no constant term. */
1898
1899 if (GET_CODE (*p) != PLUS)
1900 return 0;
1901
1902 /* If one of the summands is constant, return its location. */
1903
1904 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1905 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1906 return p;
1907
1908 /* Otherwise, check each summand for containing a constant term. */
1909
1910 if (XEXP (*p, 0) != 0)
1911 {
1912 tem = find_constant_term_loc (&XEXP (*p, 0));
1913 if (tem != 0)
1914 return tem;
1915 }
1916
1917 if (XEXP (*p, 1) != 0)
1918 {
1919 tem = find_constant_term_loc (&XEXP (*p, 1));
1920 if (tem != 0)
1921 return tem;
1922 }
1923
1924 return 0;
1925}
1926
1927
1928/* Return 1 if OP is a memory reference
1929 whose address contains no side effects
1930 and remains valid after the addition
1931 of a positive integer less than the
1932 size of the object being referenced.
1933
1934 We assume that the original address is valid and do not check it.
1935
1936 This uses strict_memory_address_p as a subroutine, so
1937 don't use it before reload. */
1938
1939int
1940offsettable_memref_p (op)
1941 rtx op;
1942{
1943 return ((GET_CODE (op) == MEM)
1944 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1945}
1946
1947/* Similar, but don't require a strictly valid mem ref:
1948 consider pseudo-regs valid as index or base regs. */
1949
1950int
1951offsettable_nonstrict_memref_p (op)
1952 rtx op;
1953{
1954 return ((GET_CODE (op) == MEM)
1955 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1956}
1957
1958/* Return 1 if Y is a memory address which contains no side effects
1959 and would remain valid after the addition of a positive integer
1960 less than the size of that mode.
1961
1962 We assume that the original address is valid and do not check it.
1963 We do check that it is valid for narrower modes.
1964
1965 If STRICTP is nonzero, we require a strictly valid address,
1966 for the sake of use in reload.c. */
1967
1968int
1969offsettable_address_p (strictp, mode, y)
1970 int strictp;
1971 enum machine_mode mode;
1972 rtx y;
1973{
1974 enum rtx_code ycode = GET_CODE (y);
1975 rtx z;
1976 rtx y1 = y;
1977 rtx *y2;
1978 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1979 (strictp ? strict_memory_address_p : memory_address_p);
1980 unsigned int mode_sz = GET_MODE_SIZE (mode);
1981
1982 if (CONSTANT_ADDRESS_P (y))
1983 return 1;
1984
1985 /* Adjusting an offsettable address involves changing to a narrower mode.
1986 Make sure that's OK. */
1987
1988 if (mode_dependent_address_p (y))
1989 return 0;
1990
1991 /* ??? How much offset does an offsettable BLKmode reference need?
1992 Clearly that depends on the situation in which it's being used.
1993 However, the current situation in which we test 0xffffffff is
1994 less than ideal. Caveat user. */
1995 if (mode_sz == 0)
1996 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1997
1998 /* If the expression contains a constant term,
1999 see if it remains valid when max possible offset is added. */
2000
2001 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2002 {
2003 int good;
2004
2005 y1 = *y2;
2006 *y2 = plus_constant (*y2, mode_sz - 1);
2007 /* Use QImode because an odd displacement may be automatically invalid
2008 for any wider mode. But it should be valid for a single byte. */
2009 good = (*addressp) (QImode, y);
2010
2011 /* In any case, restore old contents of memory. */
2012 *y2 = y1;
2013 return good;
2014 }
2015
2016 if (GET_RTX_CLASS (ycode) == 'a')
2017 return 0;
2018
2019 /* The offset added here is chosen as the maximum offset that
2020 any instruction could need to add when operating on something
2021 of the specified mode. We assume that if Y and Y+c are
2022 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2023 go inside a LO_SUM here, so we do so as well. */
2024 if (GET_CODE (y) == LO_SUM
2025 && mode != BLKmode
2026 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2027 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
2028 plus_constant (XEXP (y, 1), mode_sz - 1));
2029 else
2030 z = plus_constant (y, mode_sz - 1);
2031
2032 /* Use QImode because an odd displacement may be automatically invalid
2033 for any wider mode. But it should be valid for a single byte. */
2034 return (*addressp) (QImode, z);
2035}
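/* As an illustration of the test above: with MODE == SImode (mode_sz == 4)
   and Y of the form (plus (reg) (const_int C)), we effectively ask whether
   (plus (reg) (const_int C+3)) is still a valid QImode address; if the
   machine's displacement range ends between C and C+3, Y is not
   offsettable.  */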
2036
2037/* Return 1 if ADDR is an address-expression whose effect depends
2038 on the mode of the memory reference it is used in.
2039
2040 Autoincrement addressing is a typical example of mode-dependence
2041 because the amount of the increment depends on the mode. */
2042
2043int
2044mode_dependent_address_p (addr)
2045 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2046{
2047 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2048 return 0;
2049 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2050 win: ATTRIBUTE_UNUSED_LABEL
2051 return 1;
2052}
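/* For example, (post_inc (reg)) is mode-dependent: a QImode reference
   through it advances the register by 1, while an SImode reference
   advances it by 4, so the same address rtx behaves differently in
   different modes.  */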
2053
2054/* Return 1 if OP is a general operand
2055 other than a memory ref with a mode dependent address. */
2056
2057int
2058mode_independent_operand (op, mode)
2059 enum machine_mode mode;
2060 rtx op;
2061{
2062 rtx addr;
2063
2064 if (! general_operand (op, mode))
2065 return 0;
2066
2067 if (GET_CODE (op) != MEM)
2068 return 1;
2069
2070 addr = XEXP (op, 0);
2071 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2072 return 1;
2073 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2074 lose: ATTRIBUTE_UNUSED_LABEL
2075 return 0;
2076}
2077
2078
2079/* Like extract_insn, but save the extracted insn and don't extract it
2080 again when called again for the same insn, expecting that recog_data
2081 still contains the valid information. This is used primarily by the
2082 gen_attr infrastructure, which often extracts the same insn repeatedly. */
2083void
2084extract_insn_cached (insn)
2085 rtx insn;
2086{
2087 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2088 return;
2089 extract_insn (insn);
2090 recog_data.insn = insn;
2091}
2092/* Do cached extract_insn, constrain_operands and complain about failures.
2093 Used by insn_attrtab. */
2094void
2095extract_constrain_insn_cached (insn)
2096 rtx insn;
2097{
2098 extract_insn_cached (insn);
2099 if (which_alternative == -1
2100 && !constrain_operands (reload_completed))
2101 fatal_insn_not_found (insn);
2102}
2103/* Do cached constrain_operands, reusing any previously found alternative. */
2104int
2105constrain_operands_cached (strict)
2106 int strict;
2107{
2108 if (which_alternative == -1)
2109 return constrain_operands (strict);
2110 else
2111 return 1;
2112}
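/* A typical caller in the generated attribute code does, in effect,

     extract_constrain_insn_cached (insn);
     ... switch on which_alternative ...

   so that repeated attribute lookups on one insn pay for extraction and
   constraint checking only once.  */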
2113
2114
2115/* Analyze INSN and fill in recog_data. */
2116
2117void
2118extract_insn (insn)
2119 rtx insn;
2120{
2121 int i;
2122 int icode;
2123 int noperands;
2124 rtx body = PATTERN (insn);
2125
2126 recog_data.insn = NULL;
2127 recog_data.n_operands = 0;
2128 recog_data.n_alternatives = 0;
2129 recog_data.n_dups = 0;
2130 which_alternative = -1;
2131
2132 switch (GET_CODE (body))
2133 {
2134 case USE:
2135 case CLOBBER:
2136 case ASM_INPUT:
2137 case ADDR_VEC:
2138 case ADDR_DIFF_VEC:
2139 return;
2140
2141 case SET:
2142 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2143 goto asm_insn;
2144 else
2145 goto normal_insn;
2146 case PARALLEL:
2147 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2148 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2149 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2150 goto asm_insn;
2151 else
2152 goto normal_insn;
2153 case ASM_OPERANDS:
2154 asm_insn:
2155 recog_data.n_operands = noperands = asm_noperands (body);
2156 if (noperands >= 0)
2157 {
2158 /* This insn is an `asm' with operands. */
2159
2160 /* expand_asm_operands makes sure there aren't too many operands. */
2161 if (noperands > MAX_RECOG_OPERANDS)
2162 abort ();
2163
2164 /* Now get the operand values and constraints out of the insn. */
2165 decode_asm_operands (body, recog_data.operand,
2166 recog_data.operand_loc,
2167 recog_data.constraints,
2168 recog_data.operand_mode);
2169 if (noperands > 0)
2170 {
2171 const char *p = recog_data.constraints[0];
2172 recog_data.n_alternatives = 1;
2173 while (*p)
2174 recog_data.n_alternatives += (*p++ == ',');
2175 }
2176 break;
2177 }
2178 fatal_insn_not_found (insn);
2179
2180 default:
2181 normal_insn:
2182 /* Ordinary insn: recognize it, get the operands via insn_extract
2183 and get the constraints. */
2184
2185 icode = recog_memoized (insn);
2186 if (icode < 0)
2187 fatal_insn_not_found (insn);
2188
2189 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2190 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2191 recog_data.n_dups = insn_data[icode].n_dups;
2192
2193 insn_extract (insn);
2194
2195 for (i = 0; i < noperands; i++)
2196 {
2197 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2198 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2199 /* A VOIDmode match_operand gets its mode from the real operand. */
2200 if (recog_data.operand_mode[i] == VOIDmode)
2201 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2202 }
2203 }
2204 for (i = 0; i < noperands; i++)
2205 recog_data.operand_type[i]
2206 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2207 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2208 : OP_IN);
2209
2210 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2211 abort ();
2212}
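/* As an illustration, for an asm statement such as

     asm ("..." : "=r" (x) : "m" (y));

   this sets recog_data.n_operands to 2 and recog_data.n_alternatives to 1,
   with recog_data.constraints[0] == "=r" (hence operand_type OP_OUT) and
   recog_data.constraints[1] == "m" (hence OP_IN).  */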
2213
2214/* After calling extract_insn, you can use this function to extract some
2215 information from the constraint strings into a more usable form.
2216 The collected data is stored in recog_op_alt. */
2217void
2218preprocess_constraints ()
2219{
2220 int i;
2221
2222 memset (recog_op_alt, 0, sizeof recog_op_alt);
2223 for (i = 0; i < recog_data.n_operands; i++)
2224 {
2225 int j;
2226 struct operand_alternative *op_alt;
2227 const char *p = recog_data.constraints[i];
2228
2229 op_alt = recog_op_alt[i];
2230
2231 for (j = 0; j < recog_data.n_alternatives; j++)
2232 {
2233 op_alt[j].class = NO_REGS;
2234 op_alt[j].constraint = p;
2235 op_alt[j].matches = -1;
2236 op_alt[j].matched = -1;
2237
2238 if (*p == '\0' || *p == ',')
2239 {
2240 op_alt[j].anything_ok = 1;
2241 continue;
2242 }
2243
2244 for (;;)
2245 {
2246 char c = *p++;
2247 if (c == '#')
2248 do
2249 c = *p++;
2250 while (c != ',' && c != '\0');
2251 if (c == ',' || c == '\0')
2252 break;
2253
2254 switch (c)
2255 {
2256 case '=': case '+': case '*': case '%':
2257 case 'E': case 'F': case 'G': case 'H':
2258 case 's': case 'i': case 'n':
2259 case 'I': case 'J': case 'K': case 'L':
2260 case 'M': case 'N': case 'O': case 'P':
2261 /* These don't say anything we care about. */
2262 break;
2263
2264 case '?':
2265 op_alt[j].reject += 6;
2266 break;
2267 case '!':
2268 op_alt[j].reject += 600;
2269 break;
2270 case '&':
2271 op_alt[j].earlyclobber = 1;
2272 break;
2273
2274 case '0': case '1': case '2': case '3': case '4':
2275 case '5': case '6': case '7': case '8': case '9':
2276 {
2277 char *end;
2278 op_alt[j].matches = strtoul (p - 1, &end, 10);
2279 recog_op_alt[op_alt[j].matches][j].matched = i;
2280 p = end;
2281 }
2282 break;
2283
2284 case 'm':
2285 op_alt[j].memory_ok = 1;
2286 break;
2287 case '<':
2288 op_alt[j].decmem_ok = 1;
2289 break;
2290 case '>':
2291 op_alt[j].incmem_ok = 1;
2292 break;
2293 case 'V':
2294 op_alt[j].nonoffmem_ok = 1;
2295 break;
2296 case 'o':
2297 op_alt[j].offmem_ok = 1;
2298 break;
2299 case 'X':
2300 op_alt[j].anything_ok = 1;
2301 break;
2302
2303 case 'p':
2304 op_alt[j].is_address = 1;
2305 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2306 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2307 break;
2308
2309 case 'g': case 'r':
2310 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2311 break;
2312
2313 default:
2314 if (EXTRA_MEMORY_CONSTRAINT (c))
2315 {
2316 op_alt[j].memory_ok = 1;
2317 break;
2318 }
2319 if (EXTRA_ADDRESS_CONSTRAINT (c))
2320 {
2321 op_alt[j].is_address = 1;
2322 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2323 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2324 break;
2325 }
2326
2327 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2328 break;
2329 }
2330 }
2331 }
2332 }
2333}
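/* For example, after

     extract_insn (insn);
     preprocess_constraints ();

   a pass can ask whether alternative ALT allows operand I in memory by
   testing recog_op_alt[I][ALT].memory_ok, or look up the register class
   that alternative requires in recog_op_alt[I][ALT].class, without
   reparsing the constraint strings.  */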
2334
2335/* Check the operands of an insn against the insn's operand constraints
2336 and return 1 if they are valid.
2337 The information about the insn's operands, constraints, operand modes
2338 etc. is obtained from the global variables set up by extract_insn.
2339
2340 WHICH_ALTERNATIVE is set to a number which indicates which
2341 alternative of constraints was matched: 0 for the first alternative,
2342 1 for the next, etc.
2343
2344 In addition, when two operands are matched
2345 and it happens that the output operand is (reg) while the
2346 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2347 make the output operand look like the input.
2348 This is because the output operand is the one the template will print.
2349
2350 This is used in final, just before printing the assembler code and by
2351 the routines that determine an insn's attribute.
2352
2353 If STRICT is a positive value, it means that we have been
2354 called after reload has been completed. In that case, we must
2355 do all checks strictly. If it is zero, it means that we have been called
2356 before reload has completed. In that case, we first try to see if we can
2357 find an alternative that matches strictly. If not, we try again, this
2358 time assuming that reload will fix up the insn. This provides a "best
2359 guess" for the alternative and is used to compute attributes of insns prior
2360 to reload. A negative value of STRICT is used for this internal call. */
2361
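/* A post-reload caller might use it, in effect, as

     extract_insn (insn);
     if (! constrain_operands (1))
       fatal_insn_not_found (insn);

   after which which_alternative identifies the matched alternative,
   while pre-reload callers pass STRICT == 0 and rely on the loose
   retry described above.  */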
2362struct funny_match
2363{
2364 int this, other;
2365};
2366
2367int
2368constrain_operands (strict)
2369 int strict;
2370{
2371 const char *constraints[MAX_RECOG_OPERANDS];
2372 int matching_operands[MAX_RECOG_OPERANDS];
2373 int earlyclobber[MAX_RECOG_OPERANDS];
2374 int c;
2375
2376 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2377 int funny_match_index;
2378
2379 which_alternative = 0;
2380 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2381 return 1;
2382
2383 for (c = 0; c < recog_data.n_operands; c++)
2384 {
2385 constraints[c] = recog_data.constraints[c];
2386 matching_operands[c] = -1;
2387 }
2388
2389 do
2390 {
2391 int opno;
2392 int lose = 0;
2393 funny_match_index = 0;
2394
2395 for (opno = 0; opno < recog_data.n_operands; opno++)
2396 {
2397 rtx op = recog_data.operand[opno];
2398 enum machine_mode mode = GET_MODE (op);
2399 const char *p = constraints[opno];
2400 int offset = 0;
2401 int win = 0;
2402 int val;
2403
2404 earlyclobber[opno] = 0;
2405
2406 /* A unary operator may be accepted by the predicate, but it
2407 is irrelevant for matching constraints. */
2408 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2409 op = XEXP (op, 0);
2410
2411 if (GET_CODE (op) == SUBREG)
2412 {
2413 if (GET_CODE (SUBREG_REG (op)) == REG
2414 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2415 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2416 GET_MODE (SUBREG_REG (op)),
2417 SUBREG_BYTE (op),
2418 GET_MODE (op));
2419 op = SUBREG_REG (op);
2420 }
2421
2422 /* An empty constraint or empty alternative
2423 allows anything which matched the pattern. */
2424 if (*p == 0 || *p == ',')
2425 win = 1;
2426
2427 while (*p && (c = *p++) != ',')
2428 switch (c)
2429 {
2430 case '?': case '!': case '*': case '%':
2431 case '=': case '+':
2432 break;
2433
2434 case '#':
2435 /* Ignore rest of this alternative as far as
2436 constraint checking is concerned. */
2437 while (*p && *p != ',')
2438 p++;
2439 break;
2440
2441 case '&':
2442 earlyclobber[opno] = 1;
2443 break;
2444
2445 case '0': case '1': case '2': case '3': case '4':
2446 case '5': case '6': case '7': case '8': case '9':
2447 {
2448 /* This operand must be the same as a previous one.
2449 This kind of constraint is used for instructions such
2450 as add when they take only two operands.
2451
2452 Note that the lower-numbered operand is passed first.
2453
2454 If we are not testing strictly, assume that this
2455 constraint will be satisfied. */
2456
2457 char *end;
2458 int match;
2459
2460 match = strtoul (p - 1, &end, 10);
2461 p = end;
2462
2463 if (strict < 0)
2464 val = 1;
2465 else
2466 {
2467 rtx op1 = recog_data.operand[match];
2468 rtx op2 = recog_data.operand[opno];
2469
2470 /* A unary operator may be accepted by the predicate,
2471 but it is irrelevant for matching constraints. */
2472 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2473 op1 = XEXP (op1, 0);
2474 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2475 op2 = XEXP (op2, 0);
2476
2477 val = operands_match_p (op1, op2);
2478 }
2479
2480 matching_operands[opno] = match;
2481 matching_operands[match] = opno;
2482
2483 if (val != 0)
2484 win = 1;
2485
2486 /* If output is *x and input is *--x, arrange later
2487 to change the output to *--x as well, since the
2488 output op is the one that will be printed. */
2489 if (val == 2 && strict > 0)
2490 {
2491 funny_match[funny_match_index].this = opno;
2492 funny_match[funny_match_index++].other = match;
2493 }
2494 }
2495 break;
2496
2497 case 'p':
2498 /* p is used for address_operands. When we are called by
2499 gen_reload, no one will have checked that the address is
2500 strictly valid, i.e., that all pseudos requiring hard regs
2501 have gotten them. */
2502 if (strict <= 0
2503 || (strict_memory_address_p (recog_data.operand_mode[opno],
2504 op)))
2505 win = 1;
2506 break;
2507
2508 /* No need to check general_operand again;
2509 it was done in insn-recog.c. */
2510 case 'g':
2511 /* Anything goes unless it is a REG and really has a hard reg
2512 but the hard reg is not in the class GENERAL_REGS. */
2513 if (strict < 0
2514 || GENERAL_REGS == ALL_REGS
2515 || GET_CODE (op) != REG
2516 || (reload_in_progress
2517 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2518 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2519 win = 1;
2520 break;
2521
2522 case 'X':
2523 /* This is used for a MATCH_SCRATCH in the cases when
2524 we don't actually need anything. So anything goes
2525 any time. */
2526 win = 1;
2527 break;
2528
2529 case 'm':
2530 /* Memory operands must be valid, to the extent
2531 required by STRICT. */
2532 if (GET_CODE (op) == MEM)
2533 {
2534 if (strict > 0
2535 && !strict_memory_address_p (GET_MODE (op),
2536 XEXP (op, 0)))
2537 break;
2538 if (strict == 0
2539 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2540 break;
2541 win = 1;
2542 }
2543 /* Before reload, accept what reload can turn into mem. */
2544 else if (strict < 0 && CONSTANT_P (op))
2545 win = 1;
2546 /* During reload, accept a pseudo. */
2547 else if (reload_in_progress && GET_CODE (op) == REG
2548 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2549 win = 1;
2550 break;
2551
2552 case '<':
2553 if (GET_CODE (op) == MEM
2554 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2555 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2556 win = 1;
2557 break;
2558
2559 case '>':
2560 if (GET_CODE (op) == MEM
2561 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2562 || GET_CODE (XEXP (op, 0)) == POST_INC))
2563 win = 1;
2564 break;
2565
2566 case 'E':
2567 case 'F':
2568 if (GET_CODE (op) == CONST_DOUBLE
2569 || (GET_CODE (op) == CONST_VECTOR
2570 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2571 win = 1;
2572 break;
2573
2574 case 'G':
2575 case 'H':
2576 if (GET_CODE (op) == CONST_DOUBLE
2577 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2578 win = 1;
2579 break;
2580
2581 case 's':
2582 if (GET_CODE (op) == CONST_INT
2583 || (GET_CODE (op) == CONST_DOUBLE
2584 && GET_MODE (op) == VOIDmode))
2585 break;
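	      /* Fall through: a constant that is not an explicit
	         integer is checked like `i'.  */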
2586 case 'i':
2587 if (CONSTANT_P (op))
2588 win = 1;
2589 break;
2590
2591 case 'n':
2592 if (GET_CODE (op) == CONST_INT
2593 || (GET_CODE (op) == CONST_DOUBLE
2594 && GET_MODE (op) == VOIDmode))
2595 win = 1;
2596 break;
2597
2598 case 'I':
2599 case 'J':
2600 case 'K':
2601 case 'L':
2602 case 'M':
2603 case 'N':
2604 case 'O':
2605 case 'P':
2606 if (GET_CODE (op) == CONST_INT
2607 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2608 win = 1;
2609 break;
2610
2611 case 'V':
2612 if (GET_CODE (op) == MEM
2613 && ((strict > 0 && ! offsettable_memref_p (op))
2614 || (strict < 0
2615 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2616 || (reload_in_progress
2617 && !(GET_CODE (op) == REG
2618 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2619 win = 1;
2620 break;
2621
2622 case 'o':
2623 if ((strict > 0 && offsettable_memref_p (op))
2624 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2625 /* Before reload, accept what reload can handle. */
2626 || (strict < 0
2627 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2628 /* During reload, accept a pseudo. */
2629 || (reload_in_progress && GET_CODE (op) == REG
2630 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2631 win = 1;
2632 break;
2633
2634 default:
2635 {
2636 enum reg_class class;
2637
2638 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2639 if (class != NO_REGS)
2640 {
2641 if (strict < 0
2642 || (strict == 0
2643 && GET_CODE (op) == REG
2644 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2645 || (strict == 0 && GET_CODE (op) == SCRATCH)
2646 || (GET_CODE (op) == REG
2647 && reg_fits_class_p (op, class, offset, mode)))
2648 win = 1;
2649 }
2650#ifdef EXTRA_CONSTRAINT
2651 else if (EXTRA_CONSTRAINT (op, c))
2652 win = 1;
2653
2654 if (EXTRA_MEMORY_CONSTRAINT (c))
2655 {
2656 /* Every memory operand can be reloaded to fit. */
2657 if (strict < 0 && GET_CODE (op) == MEM)
2658 win = 1;
2659
2660 /* Before reload, accept what reload can turn into mem. */
2661 if (strict < 0 && CONSTANT_P (op))
2662 win = 1;
2663
2664 /* During reload, accept a pseudo. */
2665 if (reload_in_progress && GET_CODE (op) == REG
2666 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2667 win = 1;
2668 }
2669 if (EXTRA_ADDRESS_CONSTRAINT (c))
2670 {
2671 /* Every address operand can be reloaded to fit. */
2672 if (strict < 0)
2673 win = 1;
2674 }
2675#endif
2676 break;
2677 }
2678 }
2679
2680 constraints[opno] = p;
2681 /* If this operand did not win somehow,
2682 this alternative loses. */
2683 if (! win)
2684 lose = 1;
2685 }
2686 /* This alternative won; the operands are ok.
2687 Change whichever operands this alternative says to change. */
2688 if (! lose)
2689 {
2690 int opno, eopno;
2691
2692 /* See if any earlyclobber operand conflicts with some other
2693 operand. */
2694
2695 if (strict > 0)
2696 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2697 /* Ignore earlyclobber operands now in memory,
2698 because we would often report failure when we have
2699 two memory operands, one of which was formerly a REG. */
2700 if (earlyclobber[eopno]
2701 && GET_CODE (recog_data.operand[eopno]) == REG)
2702 for (opno = 0; opno < recog_data.n_operands; opno++)
2703 if ((GET_CODE (recog_data.operand[opno]) == MEM
2704 || recog_data.operand_type[opno] != OP_OUT)
2705 && opno != eopno
2706 /* Ignore things like match_operator operands. */
2707 && *recog_data.constraints[opno] != 0
2708 && ! (matching_operands[opno] == eopno
2709 && operands_match_p (recog_data.operand[opno],
2710 recog_data.operand[eopno]))
2711 && ! safe_from_earlyclobber (recog_data.operand[opno],
2712 recog_data.operand[eopno]))
2713 lose = 1;
2714
2715 if (! lose)
2716 {
2717 while (--funny_match_index >= 0)
2718 {
2719 recog_data.operand[funny_match[funny_match_index].other]
2720 = recog_data.operand[funny_match[funny_match_index].this];
2721 }
2722
2723 return 1;
2724 }
2725 }
2726
2727 which_alternative++;
2728 }
2729 while (which_alternative < recog_data.n_alternatives);
2730
2731 which_alternative = -1;
2732 /* If we are about to reject this, but we are not to test strictly,
2733 try a very loose test. Only return failure if it fails also. */
2734 if (strict == 0)
2735 return constrain_operands (-1);
2736 else
2737 return 0;
2738}
2739
2740/* Return 1 iff OPERAND (assumed to be a REG rtx)
2741 is a hard reg in class CLASS when its regno is offset by OFFSET
2742 and changed to mode MODE.
2743 If REG occupies multiple hard regs, all of them must be in CLASS. */
2744
2745int
2746reg_fits_class_p (operand, class, offset, mode)
2747 rtx operand;
2748 enum reg_class class;
2749 int offset;
2750 enum machine_mode mode;
2751{
2752 int regno = REGNO (operand);
2753 if (regno < FIRST_PSEUDO_REGISTER
2754 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2755 regno + offset))
2756 {
2757 int sr;
2758 regno += offset;
2759 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2760 sr > 0; sr--)
2761 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2762 regno + sr))
2763 break;
2764 return sr == 0;
2765 }
2766
2767 return 0;
2768}
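/* For instance, on a 32-bit target where DImode occupies two hard
   registers, this checks both REGNO+OFFSET and REGNO+OFFSET+1 for
   membership in CLASS via the HARD_REGNO_NREGS loop above.  */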
2769
2770
2771/* Split a single instruction. Helper function for split_all_insns and
2772 split_all_insns_noflow. Return last insn in the sequence if successful,
2773 or NULL if unsuccessful. */
2774
2775static rtx
2776split_insn (insn)
2777 rtx insn;
2778{
2779 /* Split insns here to get max fine-grain parallelism. */
2780 rtx first = PREV_INSN (insn);
2781 rtx last = try_split (PATTERN (insn), insn, 1);
2782
2783 if (last == insn)
2784 return NULL_RTX;
2785
2786 /* try_split returns the NOTE that INSN became. */
2787 PUT_CODE (insn, NOTE);
2788 NOTE_SOURCE_FILE (insn) = 0;
2789 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2790
2791 /* ??? Coddle to md files that generate subregs in post-reload
2792 splitters instead of computing the proper hard register. */
2793 if (reload_completed && first != last)
2794 {
2795 first = NEXT_INSN (first);
2796 for (;;)
2797 {
2798 if (INSN_P (first))
2799 cleanup_subreg_operands (first);
2800 if (first == last)
2801 break;
2802 first = NEXT_INSN (first);
2803 }
2804 }
2805 return last;
2806}
2807
2808/* Split all insns in the function. If UPD_LIFE, update life info after. */
2809
2810void
2811split_all_insns (upd_life)
2812 int upd_life;
2813{
2814 sbitmap blocks;
2815 bool changed;
2816 basic_block bb;
2817
2818 blocks = sbitmap_alloc (last_basic_block);
2819 sbitmap_zero (blocks);
2820 changed = false;
2821
2822 FOR_EACH_BB_REVERSE (bb)
2823 {
2824 rtx insn, next;
2825 bool finish = false;
2826
2827 for (insn = bb->head; !finish ; insn = next)
2828 {
2829 /* Can't use `next_real_insn' because that might go across
2830 CODE_LABELS and short-out basic blocks. */
2831 next = NEXT_INSN (insn);
2832 finish = (insn == bb->end);
2833 if (INSN_P (insn))
2834 {
2835 rtx set = single_set (insn);
2836
2837 /* Don't split no-op move insns. These should silently
2838 disappear later in final. Splitting such insns would
2839 break the code that handles REG_NO_CONFLICT blocks. */
2840 if (set && set_noop_p (set))
2841 {
2842 /* Nops get in the way while scheduling, so delete them
2843 now if register allocation has already been done. It
2844 is too risky to try to do this before register
2845 allocation, and there are unlikely to be very many
2846 nops then anyways. */
2847 if (reload_completed)
2848 {
2849 /* If the no-op set has a REG_UNUSED note, we need
2850 to update liveness information. */
2851 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2852 {
2853 SET_BIT (blocks, bb->index);
2854 changed = true;
2855 }
2856 /* ??? Is life info affected by deleting edges? */
2857 delete_insn_and_edges (insn);
2858 }
2859 }
2860 else
2861 {
2862 rtx last = split_insn (insn);
2863 if (last)
2864 {
2865 /* The split sequence may include a barrier, but the
2866 BB boundary we are interested in will be set to the
2867 previous one. */
2868
2869 while (GET_CODE (last) == BARRIER)
2870 last = PREV_INSN (last);
2871 SET_BIT (blocks, bb->index);
2872 changed = true;
2873 }
2874 }
2875 }
2876 }
2877 }
2878
2879 if (changed)
2880 {
2881 int old_last_basic_block = last_basic_block;
2882
2883 find_many_sub_basic_blocks (blocks);
2884
2885 if (old_last_basic_block != last_basic_block && upd_life)
2886 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2887 }
2888
2889 if (changed && upd_life)
2890 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2891 PROP_DEATH_NOTES | PROP_REG_INFO);
2892
2893#ifdef ENABLE_CHECKING
2894 verify_flow_info ();
2895#endif
2896
2897 sbitmap_free (blocks);
2898}
2899
2900/* Same as split_all_insns, but do not expect CFG to be available.
2901 Used by machine dependent reorg passes. */
2902
2903void
2904split_all_insns_noflow ()
2905{
2906 rtx next, insn;
2907
2908 for (insn = get_insns (); insn; insn = next)
2909 {
2910 next = NEXT_INSN (insn);
2911 if (INSN_P (insn))
2912 {
2913 /* Don't split no-op move insns. These should silently
2914 disappear later in final. Splitting such insns would
2915 break the code that handles REG_NO_CONFLICT blocks. */
2916 rtx set = single_set (insn);
2917 if (set && set_noop_p (set))
2918 {
2919 /* Nops get in the way while scheduling, so delete them
2920 now if register allocation has already been done. It
2921 is too risky to try to do this before register
2922 allocation, and there are unlikely to be very many
2923 nops then anyways.
2924
2925 ??? Should we use delete_insn when the CFG isn't valid? */
2926 if (reload_completed)
2927 delete_insn_and_edges (insn);
2928 }
2929 else
2930 split_insn (insn);
2931 }
2932 }
2933}
2934
2935
2936#ifdef HAVE_peephole2
2937struct peep2_insn_data
2938{
2939 rtx insn;
2940 regset live_before;
2941};
2942
2943static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2944static int peep2_current;
2945
2946/* A non-insn marker indicating the last insn of the block.
2947 The live_before regset for this element is correct, indicating
2948 global_live_at_end for the block. */
2949#define PEEP2_EOB pc_rtx
2950
2951/* Return the Nth non-note insn after `current', or return NULL_RTX if it
2952 does not exist. Used by the recognizer to find the next insn to match
2953 in a multi-insn pattern. */
2954
2955rtx
2956peep2_next_insn (n)
2957 int n;
2958{
2959 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2960 abort ();
2961
2962 n += peep2_current;
2963 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2964 n -= MAX_INSNS_PER_PEEP2 + 1;
2965
2966 if (peep2_insn_data[n].insn == PEEP2_EOB)
2967 return NULL_RTX;
2968 return peep2_insn_data[n].insn;
2969}
2970
2971/* Return true if REGNO is dead before the Nth non-note insn
2972 after `current'. */
2973
2974int
2975peep2_regno_dead_p (ofs, regno)
2976 int ofs;
2977 int regno;
2978{
2979 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2980 abort ();
2981
2982 ofs += peep2_current;
2983 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2984 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2985
2986 if (peep2_insn_data[ofs].insn == NULL_RTX)
2987 abort ();
2988
2989 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2990}
2991
2992/* Similarly for a REG. */
2993
2994int
2995peep2_reg_dead_p (ofs, reg)
2996 int ofs;
2997 rtx reg;
2998{
2999 int regno, n;
3000
3001 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3002 abort ();
3003
3004 ofs += peep2_current;
3005 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3006 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3007
3008 if (peep2_insn_data[ofs].insn == NULL_RTX)
3009 abort ();
3010
3011 regno = REGNO (reg);
3012 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3013 while (--n >= 0)
3014 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3015 return 0;
3016 return 1;
3017}
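/* Both predicates are normally used in the condition of a
   define_peephole2, e.g. something like

     (define_peephole2
       [... matched insn patterns ...]
       "peep2_reg_dead_p (2, operands[0])"
       [... replacement patterns ...])

   to require that operands[0] be dead before the insn at offset 2 of
   the matched sequence.  */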
3018
3019/* Try to find a hard register of mode MODE, matching the register class in
3020 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3021 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3022 in which case the only condition is that the register must be available
3023 before CURRENT_INSN.
3024 Registers that already have bits set in REG_SET will not be considered.
3025
3026 If an appropriate register is available, it will be returned and the
3027 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3028 returned. */
3029
3030rtx
3031peep2_find_free_register (from, to, class_str, mode, reg_set)
3032 int from, to;
3033 const char *class_str;
3034 enum machine_mode mode;
3035 HARD_REG_SET *reg_set;
3036{
3037 static int search_ofs;
3038 enum reg_class class;
3039 HARD_REG_SET live;
3040 int i;
3041
3042 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
3043 abort ();
3044
3045 from += peep2_current;
3046 if (from >= MAX_INSNS_PER_PEEP2 + 1)
3047 from -= MAX_INSNS_PER_PEEP2 + 1;
3048 to += peep2_current;
3049 if (to >= MAX_INSNS_PER_PEEP2 + 1)
3050 to -= MAX_INSNS_PER_PEEP2 + 1;
3051
3052 if (peep2_insn_data[from].insn == NULL_RTX)
3053 abort ();
3054 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3055
3056 while (from != to)
3057 {
3058 HARD_REG_SET this_live;
3059
3060 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3061 from = 0;
3062 if (peep2_insn_data[from].insn == NULL_RTX)
3063 abort ();
3064 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3065 IOR_HARD_REG_SET (live, this_live);
3066 }
3067
3068 class = (class_str[0] == 'r' ? GENERAL_REGS
3069 : REG_CLASS_FROM_LETTER (class_str[0]));
3070
3071 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3072 {
3073 int raw_regno, regno, success, j;
3074
3075 /* Distribute the free registers as much as possible. */
3076 raw_regno = search_ofs + i;
3077 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3078 raw_regno -= FIRST_PSEUDO_REGISTER;
3079#ifdef REG_ALLOC_ORDER
3080 regno = reg_alloc_order[raw_regno];
3081#else
3082 regno = raw_regno;
3083#endif
3084
3085 /* Don't allocate fixed registers. */
3086 if (fixed_regs[regno])
3087 continue;
3088 /* Make sure the register is of the right class. */
3089 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3090 continue;
3091 /* And can support the mode we need. */
3092 if (! HARD_REGNO_MODE_OK (regno, mode))
3093 continue;
3094 /* And that we don't create an extra save/restore. */
3095 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3096 continue;
3097 /* And we don't clobber traceback for noreturn functions. */
3098 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3099 && (! reload_completed || frame_pointer_needed))
3100 continue;
3101
3102 success = 1;
3103 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3104 {
3105 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3106 || TEST_HARD_REG_BIT (live, regno + j))
3107 {
3108 success = 0;
3109 break;
3110 }
3111 }
3112 if (success)
3113 {
3114 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3115 SET_HARD_REG_BIT (*reg_set, regno + j);
3116
3117 /* Start the next search with the next register. */
3118 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3119 raw_regno = 0;
3120 search_ofs = raw_regno;
3121
3122 return gen_rtx_REG (mode, regno);
3123 }
3124 }
3125
3126 search_ofs = 0;
3127 return NULL_RTX;
3128}
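/* A define_peephole2 might, e.g., use this in its condition or
   preparation code as

     HARD_REG_SET used;
     rtx scratch;

     CLEAR_HARD_REG_SET (used);
     scratch = peep2_find_free_register (0, 1, "r", SImode, &used);

   and give up on the transformation when scratch comes back NULL_RTX;
   "r" asks for a general register that is free from the first matched
   insn through the second.  */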
3129
3130/* Perform the peephole2 optimization pass. */
3131
3132void
3133peephole2_optimize (dump_file)
3134 FILE *dump_file ATTRIBUTE_UNUSED;
3135{
3136 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3137 rtx insn, prev;
3138 regset live;
3139 int i;
3140 basic_block bb;
3141#ifdef HAVE_conditional_execution
3142 sbitmap blocks;
3143 bool changed;
3144#endif
3145 bool do_cleanup_cfg = false;
3146 bool do_rebuild_jump_labels = false;
3147
3148 /* Initialize the regsets we're going to use. */
3149 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3150 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3151 live = INITIALIZE_REG_SET (rs_heads[i]);
3152
3153#ifdef HAVE_conditional_execution
3154 blocks = sbitmap_alloc (last_basic_block);
3155 sbitmap_zero (blocks);
3156 changed = false;
3157#else
3158 count_or_remove_death_notes (NULL, 1);
3159#endif
3160
3161 FOR_EACH_BB_REVERSE (bb)
3162 {
3163 struct propagate_block_info *pbi;
3164
3165 /* Indicate that all slots except the last hold invalid data. */
3166 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3167 peep2_insn_data[i].insn = NULL_RTX;
3168
3169 /* Indicate that the last slot contains live_after data. */
3170 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3171 peep2_current = MAX_INSNS_PER_PEEP2;
3172
3173 /* Start up propagation. */
3174 COPY_REG_SET (live, bb->global_live_at_end);
3175 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3176
3177#ifdef HAVE_conditional_execution
3178 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3179#else
3180 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3181#endif
3182
3183 for (insn = bb->end; ; insn = prev)
3184 {
3185 prev = PREV_INSN (insn);
3186 if (INSN_P (insn))
3187 {
3188 rtx try, before_try, x;
3189 int match_len;
3190 rtx note;
3191 bool was_call = false;
3192
3193 /* Record this insn. */
3194 if (--peep2_current < 0)
3195 peep2_current = MAX_INSNS_PER_PEEP2;
3196 peep2_insn_data[peep2_current].insn = insn;
3197 propagate_one_insn (pbi, insn);
3198 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3199
3200 /* Match the peephole. */
3201 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3202 if (try != NULL)
3203 {
3204 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3205 in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3206 and other cfg-related call notes to it. */
3207 for (i = 0; i <= match_len; ++i)
3208 {
3209 int j;
3210 rtx old_insn, new_insn, note;
3211
3212 j = i + peep2_current;
3213 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3214 j -= MAX_INSNS_PER_PEEP2 + 1;
3215 old_insn = peep2_insn_data[j].insn;
3216 if (GET_CODE (old_insn) != CALL_INSN)
3217 continue;
3218 was_call = true;
3219
3220 new_insn = try;
3221 while (new_insn != NULL_RTX)
3222 {
3223 if (GET_CODE (new_insn) == CALL_INSN)
3224 break;
3225 new_insn = NEXT_INSN (new_insn);
3226 }
3227
3228 if (new_insn == NULL_RTX)
3229 abort ();
3230
3231 CALL_INSN_FUNCTION_USAGE (new_insn)
3232 = CALL_INSN_FUNCTION_USAGE (old_insn);
3233
3234 for (note = REG_NOTES (old_insn);
3235 note;
3236 note = XEXP (note, 1))
3237 switch (REG_NOTE_KIND (note))
3238 {
3239 case REG_NORETURN:
3240 case REG_SETJMP:
3241 case REG_ALWAYS_RETURN:
3242 REG_NOTES (new_insn)
3243 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3244 XEXP (note, 0),
3245 REG_NOTES (new_insn));
3246 default:
3247 /* Discard all other reg notes. */
3248 break;
3249 }
3250
3251 /* Croak if there is another call in the sequence. */
3252 while (++i <= match_len)
3253 {
3254 j = i + peep2_current;
3255 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3256 j -= MAX_INSNS_PER_PEEP2 + 1;
3257 old_insn = peep2_insn_data[j].insn;
3258 if (GET_CODE (old_insn) == CALL_INSN)
3259 abort ();
3260 }
3261 break;
3262 }
3263
3264 i = match_len + peep2_current;
3265 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3266 i -= MAX_INSNS_PER_PEEP2 + 1;
3267
3268 note = find_reg_note (peep2_insn_data[i].insn,
3269 REG_EH_REGION, NULL_RTX);
3270
3271 /* Replace the old sequence with the new. */
3272 try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
3273 INSN_SCOPE (peep2_insn_data[i].insn));
3274 before_try = PREV_INSN (insn);
3275 delete_insn_chain (insn, peep2_insn_data[i].insn);
3276
3277 /* Re-insert the EH_REGION notes. */
3278 if (note || (was_call && nonlocal_goto_handler_labels))
3279 {
3280 edge eh_edge;
3281
3282 for (eh_edge = bb->succ; eh_edge
3283 ; eh_edge = eh_edge->succ_next)
3284 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3285 break;
3286
3287 for (x = try ; x != before_try ; x = PREV_INSN (x))
3288 if (GET_CODE (x) == CALL_INSN
3289 || (flag_non_call_exceptions
3290 && may_trap_p (PATTERN (x))
3291 && !find_reg_note (x, REG_EH_REGION, NULL)))
3292 {
3293 if (note)
3294 REG_NOTES (x)
3295 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3296 XEXP (note, 0),
3297 REG_NOTES (x));
3298
3299 if (x != bb->end && eh_edge)
3300 {
3301 edge nfte, nehe;
3302 int flags;
3303
3304 nfte = split_block (bb, x);
3305 flags = (eh_edge->flags
3306 & (EDGE_EH | EDGE_ABNORMAL));
3307 if (GET_CODE (x) == CALL_INSN)
3308 flags |= EDGE_ABNORMAL_CALL;
3309 nehe = make_edge (nfte->src, eh_edge->dest,
3310 flags);
3311
3312 nehe->probability = eh_edge->probability;
3313 nfte->probability
3314 = REG_BR_PROB_BASE - nehe->probability;
3315
3316 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3317#ifdef HAVE_conditional_execution
3318 SET_BIT (blocks, nfte->dest->index);
3319 changed = true;
3320#endif
3321 bb = nfte->src;
3322 eh_edge = nehe;
3323 }
3324 }
3325
3326 /* A possibly trapping insn may have become non-trapping.
3327 Zap any dummy outgoing edges. */
3328 do_cleanup_cfg |= purge_dead_edges (bb);
3329 }
3330
3331#ifdef HAVE_conditional_execution
3332 /* With conditional execution, we cannot back up the
3333 live information so easily, since the conditional
3334 death data structures are not so self-contained.
3335 So record that we've made a modification to this
3336 block and update life information at the end. */
3337 SET_BIT (blocks, bb->index);
3338 changed = true;
3339
3340 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3341 peep2_insn_data[i].insn = NULL_RTX;
3342 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3343#else
3344 /* Back up lifetime information past the end of the
3345 newly created sequence. */
3346 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3347 i = 0;
3348 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3349
3350 /* Update life information for the new sequence. */
3351 x = try;
3352 do
3353 {
3354 if (INSN_P (x))
3355 {
3356 if (--i < 0)
3357 i = MAX_INSNS_PER_PEEP2;
3358 peep2_insn_data[i].insn = x;
3359 propagate_one_insn (pbi, x);
3360 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3361 }
3362 x = PREV_INSN (x);
3363 }
3364 while (x != prev);
3365
3366 /* ??? Should verify that LIVE now matches what we
3367 had before the new sequence. */
3368
3369 peep2_current = i;
3370#endif
3371
3372 /* If we generated a jump instruction, it won't have
3373 JUMP_LABEL set. Recompute after we're done. */
3374 for (x = try; x != before_try; x = PREV_INSN (x))
3375 if (GET_CODE (x) == JUMP_INSN)
3376 {
3377 do_rebuild_jump_labels = true;
3378 break;
3379 }
3380 }
3381 }
3382
3383 if (insn == bb->head)
3384 break;
3385 }
3386
3387 free_propagate_block_info (pbi);
3388 }
3389
3390 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3391 FREE_REG_SET (peep2_insn_data[i].live_before);
3392 FREE_REG_SET (live);
3393
3394 if (do_rebuild_jump_labels)
3395 rebuild_jump_labels (get_insns ());
3396
3397 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3398 we've changed global life since exception handlers are no longer
3399 reachable. */
3400 if (do_cleanup_cfg)
3401 {
3402 cleanup_cfg (0);
3403 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3404 }
3405#ifdef HAVE_conditional_execution
3406 else
3407 {
3408 count_or_remove_death_notes (blocks, 1);
3409 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3410 }
3411 sbitmap_free (blocks);
3412#endif
3413}
3414#endif /* HAVE_peephole2 */
3415
3416/* Common predicates for use with define_bypass. */
3417
3418/* True if the dependency between OUT_INSN and IN_INSN is on the store
3419 data, not the address operand(s), of the store. IN_INSN must be
3420 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3421 SETs inside. */
3422
3423int
3424store_data_bypass_p (out_insn, in_insn)
3425 rtx out_insn, in_insn;
3426{
3427 rtx out_set, in_set;
3428
3429 in_set = single_set (in_insn);
3430 if (! in_set)
3431 abort ();
3432
3433 if (GET_CODE (SET_DEST (in_set)) != MEM)
3434 return false;
3435
3436 out_set = single_set (out_insn);
3437 if (out_set)
3438 {
3439 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3440 return false;
3441 }
3442 else
3443 {
3444 rtx out_pat;
3445 int i;
3446
3447 out_pat = PATTERN (out_insn);
3448 if (GET_CODE (out_pat) != PARALLEL)
3449 abort ();
3450
3451 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3452 {
3453 rtx exp = XVECEXP (out_pat, 0, i);
3454
3455 if (GET_CODE (exp) == CLOBBER)
3456 continue;
3457
3458 if (GET_CODE (exp) != SET)
3459 abort ();
3460
3461 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3462 return false;
3463 }
3464 }
3465
3466 return true;
3467}
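/* In a machine description this is typically named as the guard of a
   define_bypass, e.g.

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   granting the shorter latency only when the store needs OUT_INSN's
   result for its data rather than for its address.  Here "alu_insn"
   and "store_insn" stand for whatever insn reservations the port
   defines.  */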
3468
3469/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3470 condition, and not in the THEN or ELSE branch. OUT_INSN may be either a
3471 single_set or a PARALLEL with SETs inside; IN_INSN should be single_set
3472 for truth, but for convenience of insn categorization may be any JUMP or CALL insn. */
3473
3474int
3475if_test_bypass_p (out_insn, in_insn)
3476 rtx out_insn, in_insn;
3477{
3478 rtx out_set, in_set;
3479
3480 in_set = single_set (in_insn);
3481 if (! in_set)
3482 {
3483 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3484 return false;
3485 abort ();
3486 }
3487
3488 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3489 return false;
3490 in_set = SET_SRC (in_set);
3491
3492 out_set = single_set (out_insn);
3493 if (out_set)
3494 {
3495 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3496 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3497 return false;
3498 }
3499 else
3500 {
3501 rtx out_pat;
3502 int i;
3503
3504 out_pat = PATTERN (out_insn);
3505 if (GET_CODE (out_pat) != PARALLEL)
3506 abort ();
3507
3508 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3509 {
3510 rtx exp = XVECEXP (out_pat, 0, i);
3511
3512 if (GET_CODE (exp) == CLOBBER)
3513 continue;
3514
3515 if (GET_CODE (exp) != SET)
3516 abort ();
3517
3518 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3519 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3520 return false;
3521 }
3522 }
3523
3524 return true;
3525}